Mirror of https://github.com/FFmpeg/FFmpeg.git

Merge branch 'master' into release/3.1

Merged-by: James Almer <jamrial@gmail.com>
Author: James Almer 2016-06-26 15:14:17 -03:00
commit 104c357b6a
58 changed files with 1305 additions and 582 deletions

.gitignore (1 line changed)

@ -32,3 +32,4 @@
/avversion.h
/lcov/
/src
/mapfile


@ -2,6 +2,10 @@ Entries are sorted chronologically from oldest to youngest within each release,
releases are sorted from youngest to oldest.
version <next>:
- YUY2 Lossless Codec decoder
version 3.1:
- DXVA2-accelerated HEVC Main10 decoding
- fieldhint filter
- loop video filter and aloop audio filter


@ -184,7 +184,7 @@ clean::
distclean::
$(RM) $(DISTCLEANSUFFIXES)
$(RM) config.* .config libavutil/avconfig.h .version avversion.h version.h libavutil/ffversion.h libavcodec/codec_names.h libavcodec/bsf_list.c libavformat/protocol_list.c
$(RM) config.* .config libavutil/avconfig.h .version mapfile avversion.h version.h libavutil/ffversion.h libavcodec/codec_names.h libavcodec/bsf_list.c libavformat/protocol_list.c
ifeq ($(SRC_LINK),src)
$(RM) src
endif

configure (3 lines changed)

@ -5970,8 +5970,7 @@ enabled vdpau &&
disable vdpau
enabled vdpau && enabled xlib &&
check_func_headers "vdpau/vdpau.h vdpau/vdpau_x11.h" vdp_device_create_x11 -lvdpau &&
prepend ffmpeg_libs $($ldflags_filter "-lvdpau") &&
check_lib2 "vdpau/vdpau.h vdpau/vdpau_x11.h" vdp_device_create_x11 -lvdpau &&
enable vdpau_x11
# Funny iconv installations are not unusual, so check it after all flags have been set


@ -15,6 +15,12 @@ libavutil: 2015-08-28
API changes, most recent first:
2016-06-26 - xxxxxxx / 1c9e861 - lavu 55.27.100 / 55.13.0 - hwcontext.h
Add av_hwdevice_ctx_create().
2016-06-26 - xxxxxxx / e47b8bb - lavc 57.48.101 / 57.19.1 - avcodec.h
Adjust values for JPEG 2000 profiles.
-------- 8< --------- FFmpeg 3.1 was cut here -------- 8< ---------
2016-06-23 - 5d75e46 / db7968b - lavf 57.40.100 / 57.7.0 - avio.h
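The hwaccel rewrites below (DXVA2, VAAPI, VDPAU) all switch from hand-rolled device setup to the av_hwdevice_ctx_create() helper announced above. A minimal usage sketch, not taken from the diff; the device type, device path and function name are illustrative and error handling is reduced to the return code:

#include "libavutil/hwcontext.h"

/* Create a hardware device context; pass NULL as the device string to let
 * the backend pick a default device. Returns 0 or a negative AVERROR code. */
static int open_hw_device(AVBufferRef **hw_device_ctx)
{
    int ret = av_hwdevice_ctx_create(hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI,
                                     "/dev/dri/renderD128", NULL, 0);
    if (ret < 0)
        return ret;
    /* The returned AVBufferRef owns the device; drop it with av_buffer_unref(). */
    return 0;
}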


@ -61,9 +61,6 @@ DEFINE_GUID(DXVA2_ModeVP9_VLD_Profile0, 0x463707f8, 0xa1d0,0x4585,0x87,0x6d,0x83
DEFINE_GUID(DXVA2_NoEncrypt, 0x1b81beD0, 0xa0c7,0x11d3,0xb9,0x84,0x00,0xc0,0x4f,0x2e,0x73,0xc5);
DEFINE_GUID(GUID_NULL, 0x00000000, 0x0000,0x0000,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00);
typedef IDirect3D9* WINAPI pDirect3DCreate9(UINT);
typedef HRESULT WINAPI pCreateDeviceManager9(UINT *, IDirect3DDeviceManager9 **);
typedef struct dxva2_mode {
const GUID *guid;
enum AVCodecID codec;
@ -96,16 +93,6 @@ static const dxva2_mode dxva2_modes[] = {
{ NULL, 0 },
};
typedef struct DXVA2DevicePriv {
HMODULE d3dlib;
HMODULE dxva2lib;
HANDLE deviceHandle;
IDirect3D9 *d3d9;
IDirect3DDevice9 *d3d9device;
} DXVA2DevicePriv;
typedef struct DXVA2Context {
IDirectXVideoDecoder *decoder;
@ -119,32 +106,6 @@ typedef struct DXVA2Context {
AVBufferRef *hw_frames_ctx;
} DXVA2Context;
static void dxva2_device_uninit(AVHWDeviceContext *ctx)
{
AVDXVA2DeviceContext *hwctx = ctx->hwctx;
DXVA2DevicePriv *priv = ctx->user_opaque;
if (hwctx->devmgr && priv->deviceHandle != INVALID_HANDLE_VALUE)
IDirect3DDeviceManager9_CloseDeviceHandle(hwctx->devmgr, priv->deviceHandle);
if (hwctx->devmgr)
IDirect3DDeviceManager9_Release(hwctx->devmgr);
if (priv->d3d9device)
IDirect3DDevice9_Release(priv->d3d9device);
if (priv->d3d9)
IDirect3D9_Release(priv->d3d9);
if (priv->d3dlib)
FreeLibrary(priv->d3dlib);
if (priv->dxva2lib)
FreeLibrary(priv->dxva2lib);
av_freep(&ctx->user_opaque);
}
static void dxva2_uninit(AVCodecContext *s)
{
InputStream *ist = s->opaque;
@ -201,17 +162,11 @@ static int dxva2_alloc(AVCodecContext *s)
InputStream *ist = s->opaque;
int loglevel = (ist->hwaccel_id == HWACCEL_AUTO) ? AV_LOG_VERBOSE : AV_LOG_ERROR;
DXVA2Context *ctx;
pDirect3DCreate9 *createD3D = NULL;
pCreateDeviceManager9 *createDeviceManager = NULL;
HANDLE device_handle;
HRESULT hr;
D3DPRESENT_PARAMETERS d3dpp = {0};
D3DDISPLAYMODE d3ddm;
unsigned resetToken = 0;
UINT adapter = D3DADAPTER_DEFAULT;
AVHWDeviceContext *device_ctx;
AVDXVA2DeviceContext *device_hwctx;
DXVA2DevicePriv *device_priv;
int ret;
ctx = av_mallocz(sizeof(*ctx));
@ -223,102 +178,29 @@ static int dxva2_alloc(AVCodecContext *s)
ist->hwaccel_get_buffer = dxva2_get_buffer;
ist->hwaccel_retrieve_data = dxva2_retrieve_data;
ctx->hw_device_ctx = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_DXVA2);
if (!ctx->hw_device_ctx)
ret = av_hwdevice_ctx_create(&ctx->hw_device_ctx, AV_HWDEVICE_TYPE_DXVA2,
ist->hwaccel_device, NULL, 0);
if (ret < 0)
goto fail;
device_ctx = (AVHWDeviceContext*)ctx->hw_device_ctx->data;
device_hwctx = device_ctx->hwctx;
device_priv = av_mallocz(sizeof(*device_priv));
if (!device_priv)
goto fail;
device_ctx->user_opaque = device_priv;
device_ctx->free = dxva2_device_uninit;
device_priv->deviceHandle = INVALID_HANDLE_VALUE;
device_priv->d3dlib = LoadLibrary("d3d9.dll");
if (!device_priv->d3dlib) {
av_log(NULL, loglevel, "Failed to load D3D9 library\n");
goto fail;
}
device_priv->dxva2lib = LoadLibrary("dxva2.dll");
if (!device_priv->dxva2lib) {
av_log(NULL, loglevel, "Failed to load DXVA2 library\n");
goto fail;
}
createD3D = (pDirect3DCreate9 *)GetProcAddress(device_priv->d3dlib, "Direct3DCreate9");
if (!createD3D) {
av_log(NULL, loglevel, "Failed to locate Direct3DCreate9\n");
goto fail;
}
createDeviceManager = (pCreateDeviceManager9 *)GetProcAddress(device_priv->dxva2lib, "DXVA2CreateDirect3DDeviceManager9");
if (!createDeviceManager) {
av_log(NULL, loglevel, "Failed to locate DXVA2CreateDirect3DDeviceManager9\n");
goto fail;
}
device_priv->d3d9 = createD3D(D3D_SDK_VERSION);
if (!device_priv->d3d9) {
av_log(NULL, loglevel, "Failed to create IDirect3D object\n");
goto fail;
}
if (ist->hwaccel_device) {
adapter = atoi(ist->hwaccel_device);
av_log(NULL, AV_LOG_INFO, "Using HWAccel device %d\n", adapter);
}
IDirect3D9_GetAdapterDisplayMode(device_priv->d3d9, adapter, &d3ddm);
d3dpp.Windowed = TRUE;
d3dpp.BackBufferWidth = 640;
d3dpp.BackBufferHeight = 480;
d3dpp.BackBufferCount = 0;
d3dpp.BackBufferFormat = d3ddm.Format;
d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
d3dpp.Flags = D3DPRESENTFLAG_VIDEO;
hr = IDirect3D9_CreateDevice(device_priv->d3d9, adapter, D3DDEVTYPE_HAL, GetDesktopWindow(),
D3DCREATE_SOFTWARE_VERTEXPROCESSING | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE,
&d3dpp, &device_priv->d3d9device);
hr = IDirect3DDeviceManager9_OpenDeviceHandle(device_hwctx->devmgr,
&device_handle);
if (FAILED(hr)) {
av_log(NULL, loglevel, "Failed to create Direct3D device\n");
av_log(NULL, loglevel, "Failed to open a device handle\n");
goto fail;
}
hr = createDeviceManager(&resetToken, &device_hwctx->devmgr);
if (FAILED(hr)) {
av_log(NULL, loglevel, "Failed to create Direct3D device manager\n");
goto fail;
}
hr = IDirect3DDeviceManager9_ResetDevice(device_hwctx->devmgr, device_priv->d3d9device, resetToken);
if (FAILED(hr)) {
av_log(NULL, loglevel, "Failed to bind Direct3D device to device manager\n");
goto fail;
}
hr = IDirect3DDeviceManager9_OpenDeviceHandle(device_hwctx->devmgr, &device_priv->deviceHandle);
if (FAILED(hr)) {
av_log(NULL, loglevel, "Failed to open device handle\n");
goto fail;
}
hr = IDirect3DDeviceManager9_GetVideoService(device_hwctx->devmgr, device_priv->deviceHandle, &IID_IDirectXVideoDecoderService, (void **)&ctx->decoder_service);
hr = IDirect3DDeviceManager9_GetVideoService(device_hwctx->devmgr, device_handle,
&IID_IDirectXVideoDecoderService,
(void **)&ctx->decoder_service);
IDirect3DDeviceManager9_CloseDeviceHandle(device_hwctx->devmgr, device_handle);
if (FAILED(hr)) {
av_log(NULL, loglevel, "Failed to create IDirectXVideoDecoderService\n");
goto fail;
}
ret = av_hwdevice_ctx_init(ctx->hw_device_ctx);
if (ret < 0) {
av_log(NULL, loglevel, "Failed to initialize the HW device context\n");
goto fail;
}
ctx->tmp_frame = av_frame_alloc();
if (!ctx->tmp_frame)
goto fail;


@ -523,102 +523,14 @@ fail:
static AVClass *vaapi_log = &vaapi_class;
static av_cold void vaapi_device_uninit(AVHWDeviceContext *hwdev)
{
AVVAAPIDeviceContext *hwctx = hwdev->hwctx;
av_log(&vaapi_log, AV_LOG_VERBOSE, "Terminating VAAPI connection.\n");
vaTerminate(hwctx->display);
}
av_cold int vaapi_device_init(const char *device)
{
AVHWDeviceContext *hwdev;
AVVAAPIDeviceContext *hwctx;
VADisplay display;
VAStatus vas;
int major, minor, err;
int err;
display = 0;
#if HAVE_VAAPI_X11
if (!display) {
Display *x11_display;
// Try to open the device as an X11 display.
x11_display = XOpenDisplay(device);
if (!x11_display) {
av_log(&vaapi_log, AV_LOG_WARNING, "Cannot open X11 display "
"%s.\n", XDisplayName(device));
} else {
display = vaGetDisplay(x11_display);
if (!display) {
av_log(&vaapi_log, AV_LOG_WARNING, "Cannot open a VA display "
"from X11 display %s.\n", XDisplayName(device));
XCloseDisplay(x11_display);
} else {
av_log(&vaapi_log, AV_LOG_VERBOSE, "Opened VA display via "
"X11 display %s.\n", XDisplayName(device));
}
}
}
#endif
#if HAVE_VAAPI_DRM
if (!display && device) {
int drm_fd;
// Try to open the device as a DRM path.
drm_fd = open(device, O_RDWR);
if (drm_fd < 0) {
av_log(&vaapi_log, AV_LOG_WARNING, "Cannot open DRM device %s.\n",
device);
} else {
display = vaGetDisplayDRM(drm_fd);
if (!display) {
av_log(&vaapi_log, AV_LOG_WARNING, "Cannot open a VA display "
"from DRM device %s.\n", device);
close(drm_fd);
} else {
av_log(&vaapi_log, AV_LOG_VERBOSE, "Opened VA display via "
"DRM device %s.\n", device);
}
}
}
#endif
if (!display) {
av_log(&vaapi_log, AV_LOG_ERROR, "No VA display found for "
"device %s.\n", device);
return AVERROR(EINVAL);
}
vas = vaInitialize(display, &major, &minor);
if (vas != VA_STATUS_SUCCESS) {
av_log(&vaapi_log, AV_LOG_ERROR, "Failed to initialise VAAPI "
"connection: %d (%s).\n", vas, vaErrorStr(vas));
return AVERROR(EIO);
}
av_log(&vaapi_log, AV_LOG_VERBOSE, "Initialised VAAPI connection: "
"version %d.%d\n", major, minor);
hw_device_ctx = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VAAPI);
if (!hw_device_ctx) {
av_log(&vaapi_log, AV_LOG_ERROR, "Failed to create VAAPI "
"hardware context.\n");
vaTerminate(display);
return AVERROR(ENOMEM);
}
hwdev = (AVHWDeviceContext*)hw_device_ctx->data;
hwdev->free = &vaapi_device_uninit;
hwctx = hwdev->hwctx;
hwctx->display = display;
err = av_hwdevice_ctx_init(hw_device_ctx);
err = av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI,
device, NULL, 0);
if (err < 0) {
av_log(&vaapi_log, AV_LOG_ERROR, "Failed to initialise VAAPI "
"hardware context: %d\n", err);
av_log(&vaapi_log, AV_LOG_ERROR, "Failed to create a VAAPI device\n");
return err;
}


@ -18,11 +18,6 @@
#include <stdint.h>
#include <vdpau/vdpau.h>
#include <vdpau/vdpau_x11.h>
#include <X11/Xlib.h>
#include "ffmpeg.h"
#include "libavcodec/vdpau.h"
@ -38,23 +33,6 @@ typedef struct VDPAUContext {
AVFrame *tmp_frame;
} VDPAUContext;
typedef struct VDPAUHWDevicePriv {
VdpDeviceDestroy *device_destroy;
Display *dpy;
} VDPAUHWDevicePriv;
static void device_free(AVHWDeviceContext *ctx)
{
AVVDPAUDeviceContext *hwctx = ctx->hwctx;
VDPAUHWDevicePriv *priv = ctx->user_opaque;
if (priv->device_destroy)
priv->device_destroy(hwctx->device);
if (priv->dpy)
XCloseDisplay(priv->dpy);
av_freep(&priv);
}
static void vdpau_uninit(AVCodecContext *s)
{
InputStream *ist = s->opaque;
@ -106,15 +84,8 @@ static int vdpau_alloc(AVCodecContext *s)
InputStream *ist = s->opaque;
int loglevel = (ist->hwaccel_id == HWACCEL_AUTO) ? AV_LOG_VERBOSE : AV_LOG_ERROR;
VDPAUContext *ctx;
const char *display, *vendor;
VdpStatus err;
int ret;
VdpDevice device;
VdpGetProcAddress *get_proc_address;
VdpGetInformationString *get_information_string;
VDPAUHWDevicePriv *device_priv = NULL;
AVBufferRef *device_ref = NULL;
AVHWDeviceContext *device_ctx;
AVVDPAUDeviceContext *device_hwctx;
@ -124,12 +95,6 @@ static int vdpau_alloc(AVCodecContext *s)
if (!ctx)
return AVERROR(ENOMEM);
device_priv = av_mallocz(sizeof(*device_priv));
if (!device_priv) {
av_freep(&ctx);
goto fail;
}
ist->hwaccel_ctx = ctx;
ist->hwaccel_uninit = vdpau_uninit;
ist->hwaccel_get_buffer = vdpau_get_buffer;
@ -139,51 +104,12 @@ static int vdpau_alloc(AVCodecContext *s)
if (!ctx->tmp_frame)
goto fail;
device_priv->dpy = XOpenDisplay(ist->hwaccel_device);
if (!device_priv->dpy) {
av_log(NULL, loglevel, "Cannot open the X11 display %s.\n",
XDisplayName(ist->hwaccel_device));
goto fail;
}
display = XDisplayString(device_priv->dpy);
err = vdp_device_create_x11(device_priv->dpy, XDefaultScreen(device_priv->dpy),
&device, &get_proc_address);
if (err != VDP_STATUS_OK) {
av_log(NULL, loglevel, "VDPAU device creation on X11 display %s failed.\n",
display);
goto fail;
}
#define GET_CALLBACK(id, result) \
do { \
void *tmp; \
err = get_proc_address(device, id, &tmp); \
if (err != VDP_STATUS_OK) { \
av_log(NULL, loglevel, "Error getting the " #id " callback.\n"); \
goto fail; \
} \
result = tmp; \
} while (0)
GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY, device_priv->device_destroy);
device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VDPAU);
if (!device_ref)
goto fail;
device_ctx = (AVHWDeviceContext*)device_ref->data;
device_hwctx = device_ctx->hwctx;
device_ctx->user_opaque = device_priv;
device_ctx->free = device_free;
device_hwctx->device = device;
device_hwctx->get_proc_address = get_proc_address;
device_priv = NULL;
ret = av_hwdevice_ctx_init(device_ref);
ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VDPAU,
ist->hwaccel_device, NULL, 0);
if (ret < 0)
goto fail;
device_ctx = (AVHWDeviceContext*)device_ref->data;
device_hwctx = device_ctx->hwctx;
ctx->hw_frames_ctx = av_hwframe_ctx_alloc(device_ref);
if (!ctx->hw_frames_ctx)
@ -200,26 +126,17 @@ do {
if (ret < 0)
goto fail;
if (av_vdpau_bind_context(s, device, get_proc_address, 0))
if (av_vdpau_bind_context(s, device_hwctx->device, device_hwctx->get_proc_address, 0))
goto fail;
get_information_string(&vendor);
av_log(NULL, AV_LOG_VERBOSE, "Using VDPAU -- %s -- on X11 display %s, "
"to decode input stream #%d:%d.\n", vendor,
display, ist->file_index, ist->st->index);
av_log(NULL, AV_LOG_VERBOSE, "Using VDPAU to decode input stream #%d:%d.\n",
ist->file_index, ist->st->index);
return 0;
fail:
av_log(NULL, loglevel, "VDPAU init failed for stream #%d:%d.\n",
ist->file_index, ist->st->index);
if (device_priv) {
if (device_priv->device_destroy)
device_priv->device_destroy(device);
if (device_priv->dpy)
XCloseDisplay(device_priv->dpy);
}
av_freep(&device_priv);
av_buffer_unref(&device_ref);
vdpau_uninit(s);
return AVERROR(EINVAL);


@ -633,6 +633,7 @@ OBJS-$(CONFIG_XWD_DECODER) += xwddec.o
OBJS-$(CONFIG_XWD_ENCODER) += xwdenc.o
OBJS-$(CONFIG_Y41P_DECODER) += y41pdec.o
OBJS-$(CONFIG_Y41P_ENCODER) += y41penc.o
OBJS-$(CONFIG_YLC_DECODER) += ylc.o
OBJS-$(CONFIG_YOP_DECODER) += yop.o
OBJS-$(CONFIG_YUV4_DECODER) += yuv4dec.o
OBJS-$(CONFIG_YUV4_ENCODER) += yuv4enc.o


@ -370,6 +370,7 @@ void avcodec_register_all(void)
REGISTER_DECODER(XL, xl);
REGISTER_ENCDEC (XWD, xwd);
REGISTER_ENCDEC (Y41P, y41p);
REGISTER_DECODER(YLC, ylc);
REGISTER_DECODER(YOP, yop);
REGISTER_ENCDEC (YUV4, yuv4);
REGISTER_DECODER(ZERO12V, zero12v);


@ -408,6 +408,7 @@ enum AVCodecID {
AV_CODEC_ID_M101,
AV_CODEC_ID_MAGICYUV,
AV_CODEC_ID_SHEERVIDEO,
AV_CODEC_ID_YLC,
/* various PCM "codecs" */
AV_CODEC_ID_FIRST_AUDIO = 0x10000, ///< A dummy id pointing at the start of audio codecs
@ -3217,9 +3218,9 @@ typedef struct AVCodecContext {
#define FF_PROFILE_MPEG4_SIMPLE_STUDIO 14
#define FF_PROFILE_MPEG4_ADVANCED_SIMPLE 15
#define FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0 0
#define FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1 1
#define FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION 2
#define FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0 1
#define FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1 2
#define FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION 32768
#define FF_PROFILE_JPEG2000_DCINEMA_2K 3
#define FF_PROFILE_JPEG2000_DCINEMA_4K 4
@ -4117,7 +4118,6 @@ void avcodec_register_all(void);
* important mainly for encoders, e.g. libx264).
*
* @return An AVCodecContext filled with default values or NULL on failure.
* @see avcodec_get_context_defaults
*/
AVCodecContext *avcodec_alloc_context3(const AVCodec *codec);
@ -4127,16 +4127,14 @@ AVCodecContext *avcodec_alloc_context3(const AVCodec *codec);
*/
void avcodec_free_context(AVCodecContext **avctx);
#if FF_API_GET_CONTEXT_DEFAULTS
/**
* Set the fields of the given AVCodecContext to default values corresponding
* to the given codec (defaults may be codec-dependent).
*
* Do not call this function if a non-NULL codec has been passed
* to avcodec_alloc_context3() that allocated this AVCodecContext.
* If codec is non-NULL, it is illegal to call avcodec_open2() with a
* different codec on this AVCodecContext.
* @deprecated This function should not be used, as closing and opening a codec
* context multiple time is not supported. A new codec context should be
* allocated for each new use.
*/
int avcodec_get_context_defaults3(AVCodecContext *s, const AVCodec *codec);
#endif
/**
* Get the AVClass for AVCodecContext. It can be used in combination with
@ -4146,6 +4144,7 @@ int avcodec_get_context_defaults3(AVCodecContext *s, const AVCodec *codec);
*/
const AVClass *avcodec_get_class(void);
#if FF_API_COPY_CONTEXT
/**
* Get the AVClass for AVFrame. It can be used in combination with
* AV_OPT_SEARCH_FAKE_OBJ for examining options.
@ -4172,8 +4171,16 @@ const AVClass *avcodec_get_subtitle_rect_class(void);
* avcodec_alloc_context3(NULL), but otherwise uninitialized
* @param src source codec context
* @return AVERROR() on error (e.g. memory allocation error), 0 on success
*
* @deprecated The semantics of this function are ill-defined and it should not
* be used. If you need to transfer the stream parameters from one codec context
* to another, use an intermediate AVCodecParameters instance and the
* avcodec_parameters_from_context() / avcodec_parameters_to_context()
* functions.
*/
attribute_deprecated
int avcodec_copy_context(AVCodecContext *dest, const AVCodecContext *src);
#endif
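As the deprecation note above recommends, stream parameters now travel through an intermediate AVCodecParameters instead of avcodec_copy_context(). A minimal sketch using only the public lavc calls named in the note plus avcodec_parameters_alloc()/avcodec_parameters_free(); the helper name is illustrative:

#include "libavcodec/avcodec.h"

static int transfer_stream_params(AVCodecContext *dst, const AVCodecContext *src)
{
    AVCodecParameters *par = avcodec_parameters_alloc();
    int ret;

    if (!par)
        return AVERROR(ENOMEM);
    ret = avcodec_parameters_from_context(par, src);   /* snapshot src */
    if (ret >= 0)
        ret = avcodec_parameters_to_context(dst, par); /* apply to dst */
    avcodec_parameters_free(&par);
    return ret < 0 ? ret : 0;
}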
/**
* Allocate a new AVCodecParameters and set its fields to default values
@ -4246,9 +4253,8 @@ int avcodec_parameters_to_context(AVCodecContext *codec,
* @param avctx The context to initialize.
* @param codec The codec to open this context for. If a non-NULL codec has been
* previously passed to avcodec_alloc_context3() or
* avcodec_get_context_defaults3() for this context, then this
* parameter MUST be either NULL or equal to the previously passed
* codec.
* for this context, then this parameter MUST be either NULL or
* equal to the previously passed codec.
* @param options A dictionary filled with AVCodecContext and codec-private options.
* On return this object will be filled with options that were not found.
*
@ -4263,9 +4269,13 @@ int avcodec_open2(AVCodecContext *avctx, const AVCodec *codec, AVDictionary **op
* (but not the AVCodecContext itself).
*
* Calling this function on an AVCodecContext that hasn't been opened will free
* the codec-specific data allocated in avcodec_alloc_context3() /
* avcodec_get_context_defaults3() with a non-NULL codec. Subsequent calls will
* do nothing.
* the codec-specific data allocated in avcodec_alloc_context3() with a non-NULL
* codec. Subsequent calls will do nothing.
*
* @note Do not use this function. Use avcodec_free_context() to destroy a
* codec context (either open or closed). Opening and closing a codec context
* multiple times is not supported anymore -- use multiple codec contexts
* instead.
*/
int avcodec_close(AVCodecContext *avctx);
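Taken together, the deprecation notes in this hunk describe one lifecycle: allocate a fresh context per use, open it once, and release it with avcodec_free_context() rather than avcodec_close()/avcodec_get_context_defaults3(). A minimal sketch of that lifecycle; the function name and the omitted decode loop are illustrative:

#include "libavcodec/avcodec.h"

static int run_decoder_once(const AVCodec *codec, AVDictionary **opts)
{
    AVCodecContext *avctx = avcodec_alloc_context3(codec); /* one context per use */
    int ret;

    if (!avctx)
        return AVERROR(ENOMEM);
    ret = avcodec_open2(avctx, codec, opts);
    if (ret < 0) {
        avcodec_free_context(&avctx);
        return ret;
    }

    /* ... send packets / receive frames with avctx here ... */

    avcodec_free_context(&avctx); /* frees an open or closed context */
    return 0;
}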


@ -1556,6 +1556,13 @@ static const AVCodecDescriptor codec_descriptors[] = {
.long_name = NULL_IF_CONFIG_SMALL("BitJazz SheerVideo"),
.props = AV_CODEC_PROP_INTRA_ONLY | AV_CODEC_PROP_LOSSLESS,
},
{
.id = AV_CODEC_ID_YLC,
.type = AVMEDIA_TYPE_VIDEO,
.name = "ylc",
.long_name = NULL_IF_CONFIG_SMALL("YUY2 Lossless Codec"),
.props = AV_CODEC_PROP_INTRA_ONLY | AV_CODEC_PROP_LOSSLESS,
},
/* various PCM "codecs" */
{


@ -151,8 +151,8 @@ static int parse_source_parameters(AVDiracSeqHeader *dsh, GetBitContext *gb,
/* [DIRAC_STD] 10.3.2 Frame size. frame_size(video_params) */
/* [DIRAC_STD] custom_dimensions_flag */
if (get_bits1(gb)) {
dsh->width = svq3_get_ue_golomb(gb); /* [DIRAC_STD] FRAME_WIDTH */
dsh->height = svq3_get_ue_golomb(gb); /* [DIRAC_STD] FRAME_HEIGHT */
dsh->width = get_interleaved_ue_golomb(gb); /* [DIRAC_STD] FRAME_WIDTH */
dsh->height = get_interleaved_ue_golomb(gb); /* [DIRAC_STD] FRAME_HEIGHT */
}
/* [DIRAC_STD] 10.3.3 Chroma Sampling Format.
@ -160,7 +160,7 @@ static int parse_source_parameters(AVDiracSeqHeader *dsh, GetBitContext *gb,
/* [DIRAC_STD] custom_chroma_format_flag */
if (get_bits1(gb))
/* [DIRAC_STD] CHROMA_FORMAT_INDEX */
dsh->chroma_format = svq3_get_ue_golomb(gb);
dsh->chroma_format = get_interleaved_ue_golomb(gb);
if (dsh->chroma_format > 2U) {
if (log_ctx)
av_log(log_ctx, AV_LOG_ERROR, "Unknown chroma format %d\n",
@ -172,22 +172,22 @@ static int parse_source_parameters(AVDiracSeqHeader *dsh, GetBitContext *gb,
/* [DIRAC_STD] custom_scan_format_flag */
if (get_bits1(gb))
/* [DIRAC_STD] SOURCE_SAMPLING */
dsh->interlaced = svq3_get_ue_golomb(gb);
dsh->interlaced = get_interleaved_ue_golomb(gb);
if (dsh->interlaced > 1U)
return AVERROR_INVALIDDATA;
/* [DIRAC_STD] 10.3.5 Frame Rate. frame_rate(video_params) */
if (get_bits1(gb)) { /* [DIRAC_STD] custom_frame_rate_flag */
dsh->frame_rate_index = svq3_get_ue_golomb(gb);
dsh->frame_rate_index = get_interleaved_ue_golomb(gb);
if (dsh->frame_rate_index > 10U)
return AVERROR_INVALIDDATA;
if (!dsh->frame_rate_index) {
/* [DIRAC_STD] FRAME_RATE_NUMER */
frame_rate.num = svq3_get_ue_golomb(gb);
frame_rate.num = get_interleaved_ue_golomb(gb);
/* [DIRAC_STD] FRAME_RATE_DENOM */
frame_rate.den = svq3_get_ue_golomb(gb);
frame_rate.den = get_interleaved_ue_golomb(gb);
}
}
/* [DIRAC_STD] preset_frame_rate(video_params, index) */
@ -204,14 +204,14 @@ static int parse_source_parameters(AVDiracSeqHeader *dsh, GetBitContext *gb,
* pixel_aspect_ratio(video_params) */
if (get_bits1(gb)) { /* [DIRAC_STD] custom_pixel_aspect_ratio_flag */
/* [DIRAC_STD] index */
dsh->aspect_ratio_index = svq3_get_ue_golomb(gb);
dsh->aspect_ratio_index = get_interleaved_ue_golomb(gb);
if (dsh->aspect_ratio_index > 6U)
return AVERROR_INVALIDDATA;
if (!dsh->aspect_ratio_index) {
dsh->sample_aspect_ratio.num = svq3_get_ue_golomb(gb);
dsh->sample_aspect_ratio.den = svq3_get_ue_golomb(gb);
dsh->sample_aspect_ratio.num = get_interleaved_ue_golomb(gb);
dsh->sample_aspect_ratio.den = get_interleaved_ue_golomb(gb);
}
}
/* [DIRAC_STD] Take value from Table 10.4 Available preset pixel
@ -223,13 +223,13 @@ static int parse_source_parameters(AVDiracSeqHeader *dsh, GetBitContext *gb,
/* [DIRAC_STD] 10.3.7 Clean area. clean_area(video_params) */
if (get_bits1(gb)) { /* [DIRAC_STD] custom_clean_area_flag */
/* [DIRAC_STD] CLEAN_WIDTH */
dsh->clean_width = svq3_get_ue_golomb(gb);
dsh->clean_width = get_interleaved_ue_golomb(gb);
/* [DIRAC_STD] CLEAN_HEIGHT */
dsh->clean_height = svq3_get_ue_golomb(gb);
dsh->clean_height = get_interleaved_ue_golomb(gb);
/* [DIRAC_STD] CLEAN_LEFT_OFFSET */
dsh->clean_left_offset = svq3_get_ue_golomb(gb);
dsh->clean_left_offset = get_interleaved_ue_golomb(gb);
/* [DIRAC_STD] CLEAN_RIGHT_OFFSET */
dsh->clean_right_offset = svq3_get_ue_golomb(gb);
dsh->clean_right_offset = get_interleaved_ue_golomb(gb);
}
/* [DIRAC_STD] 10.3.8 Signal range. signal_range(video_params)
@ -237,17 +237,17 @@ static int parse_source_parameters(AVDiracSeqHeader *dsh, GetBitContext *gb,
* AVCOL_RANGE_MPEG/JPEG values */
if (get_bits1(gb)) { /* [DIRAC_STD] custom_signal_range_flag */
/* [DIRAC_STD] index */
dsh->pixel_range_index = svq3_get_ue_golomb(gb);
dsh->pixel_range_index = get_interleaved_ue_golomb(gb);
if (dsh->pixel_range_index > 4U)
return AVERROR_INVALIDDATA;
/* This assumes either fullrange or MPEG levels only */
if (!dsh->pixel_range_index) {
luma_offset = svq3_get_ue_golomb(gb);
luma_depth = av_log2(svq3_get_ue_golomb(gb)) + 1;
svq3_get_ue_golomb(gb); /* chroma offset */
svq3_get_ue_golomb(gb); /* chroma excursion */
luma_offset = get_interleaved_ue_golomb(gb);
luma_depth = av_log2(get_interleaved_ue_golomb(gb)) + 1;
get_interleaved_ue_golomb(gb); /* chroma offset */
get_interleaved_ue_golomb(gb); /* chroma excursion */
dsh->color_range = luma_offset ? AVCOL_RANGE_MPEG
: AVCOL_RANGE_JPEG;
}
@ -279,7 +279,7 @@ static int parse_source_parameters(AVDiracSeqHeader *dsh, GetBitContext *gb,
/* [DIRAC_STD] 10.3.9 Colour specification. colour_spec(video_params) */
if (get_bits1(gb)) { /* [DIRAC_STD] custom_colour_spec_flag */
/* [DIRAC_STD] index */
idx = dsh->color_spec_index = svq3_get_ue_golomb(gb);
idx = dsh->color_spec_index = get_interleaved_ue_golomb(gb);
if (dsh->color_spec_index > 4U)
return AVERROR_INVALIDDATA;
@ -291,20 +291,20 @@ static int parse_source_parameters(AVDiracSeqHeader *dsh, GetBitContext *gb,
if (!dsh->color_spec_index) {
/* [DIRAC_STD] 10.3.9.1 Colour primaries */
if (get_bits1(gb)) {
idx = svq3_get_ue_golomb(gb);
idx = get_interleaved_ue_golomb(gb);
if (idx < 3U)
dsh->color_primaries = dirac_primaries[idx];
}
/* [DIRAC_STD] 10.3.9.2 Colour matrix */
if (get_bits1(gb)) {
idx = svq3_get_ue_golomb(gb);
idx = get_interleaved_ue_golomb(gb);
if (!idx)
dsh->colorspace = AVCOL_SPC_BT709;
else if (idx == 1)
dsh->colorspace = AVCOL_SPC_BT470BG;
}
/* [DIRAC_STD] 10.3.9.3 Transfer function */
if (get_bits1(gb) && !svq3_get_ue_golomb(gb))
if (get_bits1(gb) && !get_interleaved_ue_golomb(gb))
dsh->color_trc = AVCOL_TRC_BT709;
}
} else {
@ -336,13 +336,13 @@ int av_dirac_parse_sequence_header(AVDiracSeqHeader **pdsh,
goto fail;
/* [DIRAC_SPEC] 10.1 Parse Parameters. parse_parameters() */
dsh->version.major = svq3_get_ue_golomb(&gb);
dsh->version.minor = svq3_get_ue_golomb(&gb);
dsh->profile = svq3_get_ue_golomb(&gb);
dsh->level = svq3_get_ue_golomb(&gb);
dsh->version.major = get_interleaved_ue_golomb(&gb);
dsh->version.minor = get_interleaved_ue_golomb(&gb);
dsh->profile = get_interleaved_ue_golomb(&gb);
dsh->level = get_interleaved_ue_golomb(&gb);
/* [DIRAC_SPEC] sequence_header() -> base_video_format as defined in
* 10.2 Base Video Format, table 10.1 Dirac predefined video formats */
video_format = svq3_get_ue_golomb(&gb);
video_format = get_interleaved_ue_golomb(&gb);
if (dsh->version.major < 2 && log_ctx)
av_log(log_ctx, AV_LOG_WARNING, "Stream is old and may not work\n");
@ -377,7 +377,7 @@ int av_dirac_parse_sequence_header(AVDiracSeqHeader **pdsh,
/* [DIRAC_STD] picture_coding_mode shall be 0 for fields and 1 for frames
* currently only used to signal field coding */
picture_coding_mode = svq3_get_ue_golomb(&gb);
picture_coding_mode = get_interleaved_ue_golomb(&gb);
if (picture_coding_mode != 0) {
if (log_ctx) {
av_log(log_ctx, AV_LOG_ERROR, "Unsupported picture coding mode %d",


@ -671,9 +671,9 @@ static void decode_component(DiracContext *s, int comp)
align_get_bits(&s->gb);
/* [DIRAC_STD] 13.4.2 subband() */
b->length = svq3_get_ue_golomb(&s->gb);
b->length = get_interleaved_ue_golomb(&s->gb);
if (b->length) {
b->quant = svq3_get_ue_golomb(&s->gb);
b->quant = get_interleaved_ue_golomb(&s->gb);
align_get_bits(&s->gb);
b->coeff_data = s->gb.buffer + get_bits_count(&s->gb)/8;
b->length = FFMIN(b->length, FFMAX(get_bits_left(&s->gb)/8, 0));
@ -1001,7 +1001,7 @@ static int dirac_unpack_prediction_parameters(DiracContext *s)
align_get_bits(gb);
/* [DIRAC_STD] 11.2.2 Block parameters. block_parameters() */
/* Luma and Chroma are equal. 11.2.3 */
idx = svq3_get_ue_golomb(gb); /* [DIRAC_STD] index */
idx = get_interleaved_ue_golomb(gb); /* [DIRAC_STD] index */
if (idx > 4) {
av_log(s->avctx, AV_LOG_ERROR, "Block prediction index too high\n");
@ -1009,10 +1009,10 @@ static int dirac_unpack_prediction_parameters(DiracContext *s)
}
if (idx == 0) {
s->plane[0].xblen = svq3_get_ue_golomb(gb);
s->plane[0].yblen = svq3_get_ue_golomb(gb);
s->plane[0].xbsep = svq3_get_ue_golomb(gb);
s->plane[0].ybsep = svq3_get_ue_golomb(gb);
s->plane[0].xblen = get_interleaved_ue_golomb(gb);
s->plane[0].yblen = get_interleaved_ue_golomb(gb);
s->plane[0].xbsep = get_interleaved_ue_golomb(gb);
s->plane[0].ybsep = get_interleaved_ue_golomb(gb);
} else {
/*[DIRAC_STD] preset_block_params(index). Table 11.1 */
s->plane[0].xblen = default_blen[idx-1];
@ -1046,7 +1046,7 @@ static int dirac_unpack_prediction_parameters(DiracContext *s)
/*[DIRAC_STD] 11.2.5 Motion vector precision. motion_vector_precision()
Read motion vector precision */
s->mv_precision = svq3_get_ue_golomb(gb);
s->mv_precision = get_interleaved_ue_golomb(gb);
if (s->mv_precision > 3) {
av_log(s->avctx, AV_LOG_ERROR, "MV precision finer than eighth-pel\n");
return AVERROR_INVALIDDATA;
@ -1066,7 +1066,7 @@ static int dirac_unpack_prediction_parameters(DiracContext *s)
/* [DIRAC_STD] zoom_rotate_shear(gparams)
zoom/rotation/shear parameters */
if (get_bits1(gb)) {
s->globalmc[ref].zrs_exp = svq3_get_ue_golomb(gb);
s->globalmc[ref].zrs_exp = get_interleaved_ue_golomb(gb);
s->globalmc[ref].zrs[0][0] = dirac_get_se_golomb(gb);
s->globalmc[ref].zrs[0][1] = dirac_get_se_golomb(gb);
s->globalmc[ref].zrs[1][0] = dirac_get_se_golomb(gb);
@ -1077,7 +1077,7 @@ static int dirac_unpack_prediction_parameters(DiracContext *s)
}
/* [DIRAC_STD] perspective(gparams) */
if (get_bits1(gb)) {
s->globalmc[ref].perspective_exp = svq3_get_ue_golomb(gb);
s->globalmc[ref].perspective_exp = get_interleaved_ue_golomb(gb);
s->globalmc[ref].perspective[0] = dirac_get_se_golomb(gb);
s->globalmc[ref].perspective[1] = dirac_get_se_golomb(gb);
}
@ -1086,7 +1086,7 @@ static int dirac_unpack_prediction_parameters(DiracContext *s)
/*[DIRAC_STD] 11.2.7 Picture prediction mode. prediction_mode()
Picture prediction mode, not currently used. */
if (svq3_get_ue_golomb(gb)) {
if (get_interleaved_ue_golomb(gb)) {
av_log(s->avctx, AV_LOG_ERROR, "Unknown picture prediction mode\n");
return AVERROR_INVALIDDATA;
}
@ -1098,7 +1098,7 @@ static int dirac_unpack_prediction_parameters(DiracContext *s)
s->weight[1] = 1;
if (get_bits1(gb)) {
s->weight_log2denom = svq3_get_ue_golomb(gb);
s->weight_log2denom = get_interleaved_ue_golomb(gb);
s->weight[0] = dirac_get_se_golomb(gb);
if (s->num_refs == 2)
s->weight[1] = dirac_get_se_golomb(gb);
@ -1117,7 +1117,7 @@ static int dirac_unpack_idwt_params(DiracContext *s)
unsigned tmp;
#define CHECKEDREAD(dst, cond, errmsg) \
tmp = svq3_get_ue_golomb(gb); \
tmp = get_interleaved_ue_golomb(gb); \
if (cond) { \
av_log(s->avctx, AV_LOG_ERROR, errmsg); \
return AVERROR_INVALIDDATA; \
@ -1151,18 +1151,18 @@ static int dirac_unpack_idwt_params(DiracContext *s)
}
}
else {
s->num_x = svq3_get_ue_golomb(gb);
s->num_y = svq3_get_ue_golomb(gb);
s->num_x = get_interleaved_ue_golomb(gb);
s->num_y = get_interleaved_ue_golomb(gb);
if (s->ld_picture) {
s->lowdelay.bytes.num = svq3_get_ue_golomb(gb);
s->lowdelay.bytes.den = svq3_get_ue_golomb(gb);
s->lowdelay.bytes.num = get_interleaved_ue_golomb(gb);
s->lowdelay.bytes.den = get_interleaved_ue_golomb(gb);
if (s->lowdelay.bytes.den <= 0) {
av_log(s->avctx,AV_LOG_ERROR,"Invalid lowdelay.bytes.den\n");
return AVERROR_INVALIDDATA;
}
} else if (s->hq_picture) {
s->highquality.prefix_bytes = svq3_get_ue_golomb(gb);
s->highquality.size_scaler = svq3_get_ue_golomb(gb);
s->highquality.prefix_bytes = get_interleaved_ue_golomb(gb);
s->highquality.size_scaler = get_interleaved_ue_golomb(gb);
if (s->highquality.prefix_bytes >= INT_MAX / 8) {
av_log(s->avctx,AV_LOG_ERROR,"too many prefix bytes\n");
return AVERROR_INVALIDDATA;
@ -1173,11 +1173,11 @@ static int dirac_unpack_idwt_params(DiracContext *s)
if (get_bits1(gb)) {
av_log(s->avctx,AV_LOG_DEBUG,"Low Delay: Has Custom Quantization Matrix!\n");
/* custom quantization matrix */
s->lowdelay.quant[0][0] = svq3_get_ue_golomb(gb);
s->lowdelay.quant[0][0] = get_interleaved_ue_golomb(gb);
for (level = 0; level < s->wavelet_depth; level++) {
s->lowdelay.quant[level][1] = svq3_get_ue_golomb(gb);
s->lowdelay.quant[level][2] = svq3_get_ue_golomb(gb);
s->lowdelay.quant[level][3] = svq3_get_ue_golomb(gb);
s->lowdelay.quant[level][1] = get_interleaved_ue_golomb(gb);
s->lowdelay.quant[level][2] = get_interleaved_ue_golomb(gb);
s->lowdelay.quant[level][3] = get_interleaved_ue_golomb(gb);
}
} else {
if (s->wavelet_depth > 4) {
@ -1388,7 +1388,7 @@ static int dirac_unpack_block_motion_data(DiracContext *s)
/* [DIRAC_STD] 12.3.1 Superblock splitting modes. superblock_split_modes()
decode superblock split modes */
ff_dirac_init_arith_decoder(arith, gb, svq3_get_ue_golomb(gb)); /* svq3_get_ue_golomb(gb) is the length */
ff_dirac_init_arith_decoder(arith, gb, get_interleaved_ue_golomb(gb)); /* get_interleaved_ue_golomb(gb) is the length */
for (y = 0; y < s->sbheight; y++) {
for (x = 0; x < s->sbwidth; x++) {
unsigned int split = dirac_get_arith_uint(arith, CTX_SB_F1, CTX_SB_DATA);
@ -1400,13 +1400,13 @@ static int dirac_unpack_block_motion_data(DiracContext *s)
}
/* setup arith decoding */
ff_dirac_init_arith_decoder(arith, gb, svq3_get_ue_golomb(gb));
ff_dirac_init_arith_decoder(arith, gb, get_interleaved_ue_golomb(gb));
for (i = 0; i < s->num_refs; i++) {
ff_dirac_init_arith_decoder(arith + 4 + 2 * i, gb, svq3_get_ue_golomb(gb));
ff_dirac_init_arith_decoder(arith + 5 + 2 * i, gb, svq3_get_ue_golomb(gb));
ff_dirac_init_arith_decoder(arith + 4 + 2 * i, gb, get_interleaved_ue_golomb(gb));
ff_dirac_init_arith_decoder(arith + 5 + 2 * i, gb, get_interleaved_ue_golomb(gb));
}
for (i = 0; i < 3; i++)
ff_dirac_init_arith_decoder(arith+1+i, gb, svq3_get_ue_golomb(gb));
ff_dirac_init_arith_decoder(arith+1+i, gb, get_interleaved_ue_golomb(gb));
for (y = 0; y < s->sbheight; y++)
for (x = 0; x < s->sbwidth; x++) {


@ -487,7 +487,7 @@ static inline const uint8_t *align_get_bits(GetBitContext *s)
SKIP_BITS(name, gb, n); \
} while (0)
#define GET_RL_VLC_INTERNAL(level, run, name, gb, table, bits, \
#define GET_RL_VLC(level, run, name, gb, table, bits, \
max_depth, need_update) \
do { \
int n, nb_bits; \
@ -584,89 +584,4 @@ static inline int skip_1stop_8data_bits(GetBitContext *gb)
return 0;
}
//#define TRACE
#ifdef TRACE
static inline void print_bin(int bits, int n)
{
int i;
for (i = n - 1; i >= 0; i--)
av_log(NULL, AV_LOG_DEBUG, "%d", (bits >> i) & 1);
for (i = n; i < 24; i++)
av_log(NULL, AV_LOG_DEBUG, " ");
}
static inline int get_bits_trace(GetBitContext *s, int n, const char *file,
const char *func, int line)
{
int r = get_bits(s, n);
print_bin(r, n);
av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d bit @%5d in %s %s:%d\n",
r, n, r, get_bits_count(s) - n, file, func, line);
return r;
}
static inline int get_vlc_trace(GetBitContext *s, VLC_TYPE (*table)[2],
int bits, int max_depth, const char *file,
const char *func, int line)
{
int show = show_bits(s, 24);
int pos = get_bits_count(s);
int r = get_vlc2(s, table, bits, max_depth);
int len = get_bits_count(s) - pos;
int bits2 = show >> (24 - len);
print_bin(bits2, len);
av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d vlc @%5d in %s %s:%d\n",
bits2, len, r, pos, file, func, line);
return r;
}
#define GET_RL_VLC(level, run, name, gb, table, bits, \
max_depth, need_update) \
do { \
int show = SHOW_UBITS(name, gb, 24); \
int len; \
int pos = name ## _index; \
\
GET_RL_VLC_INTERNAL(level, run, name, gb, table, bits,max_depth, need_update); \
\
len = name ## _index - pos + 1; \
show = show >> (24 - len); \
\
print_bin(show, len); \
\
av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d/%-3d rlv @%5d in %s %s:%d\n",\
show, len, run-1, level, pos, __FILE__, __PRETTY_FUNCTION__, __LINE__);\
} while (0) \
static inline int get_xbits_trace(GetBitContext *s, int n, const char *file,
const char *func, int line)
{
int show = show_bits(s, n);
int r = get_xbits(s, n);
print_bin(show, n);
av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d xbt @%5d in %s %s:%d\n",
show, n, r, get_bits_count(s) - n, file, func, line);
return r;
}
#define get_bits(s, n) get_bits_trace(s , n, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#define get_bits1(s) get_bits_trace(s, 1, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#define get_xbits(s, n) get_xbits_trace(s, n, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#define get_vlc(s, vlc) get_vlc_trace(s, (vlc)->table, (vlc)->bits, 3, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#define get_vlc2(s, tab, bits, max) get_vlc_trace(s, tab, bits, max, __FILE__, __PRETTY_FUNCTION__, __LINE__)
#else //TRACE
#define GET_RL_VLC GET_RL_VLC_INTERNAL
#endif
#endif /* AVCODEC_GET_BITS_H */


@ -112,7 +112,7 @@ static inline int get_ue_golomb_31(GetBitContext *gb)
return ff_ue_golomb_vlc_code[buf];
}
static inline unsigned svq3_get_ue_golomb(GetBitContext *gb)
static inline unsigned get_interleaved_ue_golomb(GetBitContext *gb)
{
uint32_t buf;
@ -219,7 +219,7 @@ static inline int get_se_golomb_long(GetBitContext *gb)
return ((buf >> 1) ^ sign) + 1;
}
static inline int svq3_get_se_golomb(GetBitContext *gb)
static inline int get_interleaved_se_golomb(GetBitContext *gb)
{
unsigned int buf;
@ -254,7 +254,7 @@ static inline int svq3_get_se_golomb(GetBitContext *gb)
static inline int dirac_get_se_golomb(GetBitContext *gb)
{
uint32_t ret = svq3_get_ue_golomb(gb);
uint32_t ret = get_interleaved_ue_golomb(gb);
if (ret) {
int sign = -get_bits1(gb);
@ -409,8 +409,6 @@ static inline int get_ue(GetBitContext *s, const char *file, const char *func,
int len = get_bits_count(s) - pos;
int bits = show >> (24 - len);
print_bin(bits, len);
av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d ue @%5d in %s %s:%d\n",
bits, len, i, pos, file, func, line);
@ -426,8 +424,6 @@ static inline int get_se(GetBitContext *s, const char *file, const char *func,
int len = get_bits_count(s) - pos;
int bits = show >> (24 - len);
print_bin(bits, len);
av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d se @%5d in %s %s:%d\n",
bits, len, i, pos, file, func, line);
@ -443,8 +439,6 @@ static inline int get_te(GetBitContext *s, int r, char *file, const char *func,
int len = get_bits_count(s) - pos;
int bits = show >> (24 - len);
print_bin(bits, len);
av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d te @%5d in %s %s:%d\n",
bits, len, i, pos, file, func, line);


@ -422,10 +422,6 @@ static inline int get_level_prefix(GetBitContext *gb){
buf=GET_CACHE(re, gb);
log= 32 - av_log2(buf);
#ifdef TRACE
print_bin(buf>>(32-log), log);
av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d lpr @%5d in %s get_level_prefix\n", buf>>(32-log), log, log-1, get_bits_count(gb), __FILE__);
#endif
LAST_SKIP_BITS(re, gb, log);
CLOSE_READER(re, gb);


@ -352,6 +352,12 @@ static av_cold int libopenjpeg_encode_init(AVCodecContext *avctx)
ctx->enc_params.cp_cinema = ctx->cinema_mode;
#endif
if (!ctx->numresolution) {
ctx->numresolution = 6;
while (FFMIN(avctx->width, avctx->height) >> ctx->numresolution < 1)
ctx->numresolution --;
}
ctx->enc_params.mode = !!avctx->global_quality;
ctx->enc_params.prog_order = ctx->prog_order;
ctx->enc_params.numresolution = ctx->numresolution;
@ -815,7 +821,7 @@ static const AVOption options[] = {
{ "rpcl", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = OPJ(RPCL) }, 0, 0, VE, "prog_order" },
{ "pcrl", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = OPJ(PCRL) }, 0, 0, VE, "prog_order" },
{ "cprl", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = OPJ(CPRL) }, 0, 0, VE, "prog_order" },
{ "numresolution", NULL, OFFSET(numresolution), AV_OPT_TYPE_INT, { .i64 = 6 }, 1, INT_MAX, VE },
{ "numresolution", NULL, OFFSET(numresolution), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, VE },
{ "numlayers", NULL, OFFSET(numlayers), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, 10, VE },
{ "disto_alloc", NULL, OFFSET(disto_alloc), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 1, VE },
{ "fixed_alloc", NULL, OFFSET(fixed_alloc), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },


@ -71,6 +71,7 @@ typedef struct MPADecodeContext {
MPA_DECODE_HEADER
uint8_t last_buf[LAST_BUF_SIZE];
int last_buf_size;
int extrasize;
/* next header (used in free format parsing) */
uint32_t free_format_next_header;
GetBitContext gb;
@ -819,9 +820,10 @@ static void exponents_from_scale_factors(MPADecodeContext *s, GranuleDef *g,
static void switch_buffer(MPADecodeContext *s, int *pos, int *end_pos,
int *end_pos2)
{
if (s->in_gb.buffer && *pos >= s->gb.size_in_bits) {
if (s->in_gb.buffer && *pos >= s->gb.size_in_bits - s->extrasize * 8) {
s->gb = s->in_gb;
s->in_gb.buffer = NULL;
s->extrasize = 0;
av_assert2((get_bits_count(&s->gb) & 7) == 0);
skip_bits_long(&s->gb, *pos - *end_pos);
*end_pos2 =
@ -853,7 +855,7 @@ static int huffman_decode(MPADecodeContext *s, GranuleDef *g,
int i;
int last_pos, bits_left;
VLC *vlc;
int end_pos = FFMIN(end_pos2, s->gb.size_in_bits);
int end_pos = FFMIN(end_pos2, s->gb.size_in_bits - s->extrasize * 8);
/* low frequencies (called big values) */
s_index = 0;
@ -1387,18 +1389,16 @@ static int mp_decode_layer3(MPADecodeContext *s)
if (!s->adu_mode) {
int skip;
const uint8_t *ptr = s->gb.buffer + (get_bits_count(&s->gb)>>3);
int extrasize = av_clip(get_bits_left(&s->gb) >> 3, 0, EXTRABYTES);
s->extrasize = av_clip((get_bits_left(&s->gb) >> 3) - s->extrasize, 0,
FFMAX(0, LAST_BUF_SIZE - s->last_buf_size));
av_assert1((get_bits_count(&s->gb) & 7) == 0);
/* now we get bits from the main_data_begin offset */
ff_dlog(s->avctx, "seekback:%d, lastbuf:%d\n",
main_data_begin, s->last_buf_size);
memcpy(s->last_buf + s->last_buf_size, ptr, extrasize);
memcpy(s->last_buf + s->last_buf_size, ptr, s->extrasize);
s->in_gb = s->gb;
init_get_bits(&s->gb, s->last_buf, s->last_buf_size*8);
#if !UNCHECKED_BITSTREAM_READER
s->gb.size_in_bits_plus8 += FFMAX(extrasize, LAST_BUF_SIZE - s->last_buf_size) * 8;
#endif
init_get_bits(&s->gb, s->last_buf, (s->last_buf_size + s->extrasize) * 8);
s->last_buf_size <<= 3;
for (gr = 0; gr < nb_granules && (s->last_buf_size >> 3) < main_data_begin; gr++) {
for (ch = 0; ch < s->nb_channels; ch++) {
@ -1409,15 +1409,17 @@ static int mp_decode_layer3(MPADecodeContext *s)
}
}
skip = s->last_buf_size - 8 * main_data_begin;
if (skip >= s->gb.size_in_bits && s->in_gb.buffer) {
skip_bits_long(&s->in_gb, skip - s->gb.size_in_bits);
if (skip >= s->gb.size_in_bits - s->extrasize * 8 && s->in_gb.buffer) {
skip_bits_long(&s->in_gb, skip - s->gb.size_in_bits + s->extrasize * 8);
s->gb = s->in_gb;
s->in_gb.buffer = NULL;
s->extrasize = 0;
} else {
skip_bits_long(&s->gb, skip);
}
} else {
gr = 0;
s->extrasize = 0;
}
for (; gr < nb_granules; gr++) {
@ -1582,7 +1584,7 @@ static int mp_decode_frame(MPADecodeContext *s, OUT_INT **samples,
s->last_buf_size=0;
if (s->in_gb.buffer) {
align_get_bits(&s->gb);
i = get_bits_left(&s->gb)>>3;
i = (get_bits_left(&s->gb) >> 3) - s->extrasize;
if (i >= 0 && i <= BACKSTEP_SIZE) {
memmove(s->last_buf, s->gb.buffer + (get_bits_count(&s->gb)>>3), i);
s->last_buf_size=i;
@ -1590,12 +1592,12 @@ static int mp_decode_frame(MPADecodeContext *s, OUT_INT **samples,
av_log(s->avctx, AV_LOG_ERROR, "invalid old backstep %d\n", i);
s->gb = s->in_gb;
s->in_gb.buffer = NULL;
s->extrasize = 0;
}
align_get_bits(&s->gb);
av_assert1((get_bits_count(&s->gb) & 7) == 0);
i = get_bits_left(&s->gb) >> 3;
i = (get_bits_left(&s->gb) >> 3) - s->extrasize;
if (i < 0 || i > BACKSTEP_SIZE || nb_frames < 0) {
if (i < 0)
av_log(s->avctx, AV_LOG_ERROR, "invalid new backstep %d\n", i);


@ -28,7 +28,6 @@
#include "mpegvideo.h"
#include "msmpeg4.h"
#include "libavutil/imgutils.h"
#include "libavutil/x86/asm.h"
#include "h263.h"
#include "mpeg4video.h"
#include "msmpeg4data.h"


@ -89,7 +89,7 @@ static const AVClass av_codec_context_class = {
.get_category = get_category,
};
int avcodec_get_context_defaults3(AVCodecContext *s, const AVCodec *codec)
static int init_context_defaults(AVCodecContext *s, const AVCodec *codec)
{
int flags=0;
memset(s, 0, sizeof(AVCodecContext));
@ -146,6 +146,13 @@ int avcodec_get_context_defaults3(AVCodecContext *s, const AVCodec *codec)
return 0;
}
#if FF_API_GET_CONTEXT_DEFAULTS
int avcodec_get_context_defaults3(AVCodecContext *s, const AVCodec *codec)
{
return init_context_defaults(s, codec);
}
#endif
AVCodecContext *avcodec_alloc_context3(const AVCodec *codec)
{
AVCodecContext *avctx= av_malloc(sizeof(AVCodecContext));
@ -153,7 +160,7 @@ AVCodecContext *avcodec_alloc_context3(const AVCodec *codec)
if (!avctx)
return NULL;
if(avcodec_get_context_defaults3(avctx, codec) < 0){
if (init_context_defaults(avctx, codec) < 0) {
av_free(avctx);
return NULL;
}
@ -179,6 +186,7 @@ void avcodec_free_context(AVCodecContext **pavctx)
av_freep(pavctx);
}
#if FF_API_COPY_CONTEXT
int avcodec_copy_context(AVCodecContext *dest, const AVCodecContext *src)
{
const AVCodec *orig_codec = dest->codec;
@ -225,6 +233,7 @@ FF_ENABLE_DEPRECATION_WARNINGS
dest->inter_matrix = NULL;
dest->rc_override = NULL;
dest->subtitle_header = NULL;
dest->hw_frames_ctx = NULL;
#define alloc_and_copy_or_fail(obj, size, pad) \
if (src->obj && size > 0) { \
@ -245,19 +254,27 @@ FF_ENABLE_DEPRECATION_WARNINGS
av_assert0(dest->subtitle_header_size == src->subtitle_header_size);
#undef alloc_and_copy_or_fail
if (src->hw_frames_ctx) {
dest->hw_frames_ctx = av_buffer_ref(src->hw_frames_ctx);
if (!dest->hw_frames_ctx)
goto fail;
}
return 0;
fail:
av_freep(&dest->subtitle_header);
av_freep(&dest->rc_override);
av_freep(&dest->intra_matrix);
av_freep(&dest->inter_matrix);
av_freep(&dest->extradata);
av_freep(&dest->subtitle_header);
av_buffer_unref(&dest->hw_frames_ctx);
dest->subtitle_header_size = 0;
dest->extradata_size = 0;
av_opt_free(dest);
return AVERROR(ENOMEM);
}
#endif
const AVClass *avcodec_get_class(void)
{


@ -89,7 +89,7 @@ static int rv30_decode_intra_types(RV34DecContext *r, GetBitContext *gb, int8_t
for(i = 0; i < 4; i++, dst += r->intra_types_stride - 4){
for(j = 0; j < 4; j+= 2){
unsigned code = svq3_get_ue_golomb(gb) << 1;
unsigned code = get_interleaved_ue_golomb(gb) << 1;
if (code > 80U*2U) {
av_log(r->s.avctx, AV_LOG_ERROR, "Incorrect intra prediction code\n");
return -1;
@ -117,7 +117,7 @@ static int rv30_decode_mb_info(RV34DecContext *r)
static const int rv30_b_types[6] = { RV34_MB_SKIP, RV34_MB_B_DIRECT, RV34_MB_B_FORWARD, RV34_MB_B_BACKWARD, RV34_MB_TYPE_INTRA, RV34_MB_TYPE_INTRA16x16 };
MpegEncContext *s = &r->s;
GetBitContext *gb = &s->gb;
unsigned code = svq3_get_ue_golomb(gb);
unsigned code = get_interleaved_ue_golomb(gb);
if (code > 11) {
av_log(s->avctx, AV_LOG_ERROR, "Incorrect MB type code\n");


@ -864,8 +864,8 @@ static int rv34_decode_mv(RV34DecContext *r, int block_type)
memset(r->dmv, 0, sizeof(r->dmv));
for(i = 0; i < num_mvs[block_type]; i++){
r->dmv[i][0] = svq3_get_se_golomb(gb);
r->dmv[i][1] = svq3_get_se_golomb(gb);
r->dmv[i][0] = get_interleaved_se_golomb(gb);
r->dmv[i][1] = get_interleaved_se_golomb(gb);
}
switch(block_type){
case RV34_MB_TYPE_INTRA:


@ -231,7 +231,7 @@ static int rv40_decode_mb_info(RV34DecContext *r)
int mb_pos = s->mb_x + s->mb_y * s->mb_stride;
if(!r->s.mb_skip_run) {
r->s.mb_skip_run = svq3_get_ue_golomb(gb) + 1;
r->s.mb_skip_run = get_interleaved_ue_golomb(gb) + 1;
if(r->s.mb_skip_run > (unsigned)s->mb_num)
return -1;
}


@ -295,7 +295,7 @@ static inline int svq3_decode_block(GetBitContext *gb, int16_t *block,
const uint8_t *const scan = scan_patterns[type];
for (limit = (16 >> intra); index < 16; index = limit, limit += 8) {
for (; (vlc = svq3_get_ue_golomb(gb)) != 0; index++) {
for (; (vlc = get_interleaved_ue_golomb(gb)) != 0; index++) {
if ((int32_t)vlc < 0)
return -1;
@ -534,8 +534,8 @@ static inline int svq3_mc_dir(SVQ3Context *s, int size, int mode,
if (mode == PREDICT_MODE) {
dx = dy = 0;
} else {
dy = svq3_get_se_golomb(&s->gb_slice);
dx = svq3_get_se_golomb(&s->gb_slice);
dy = get_interleaved_se_golomb(&s->gb_slice);
dx = get_interleaved_se_golomb(&s->gb_slice);
if (dx == INVALID_VLC || dy == INVALID_VLC) {
av_log(s->avctx, AV_LOG_ERROR, "invalid MV vlc\n");
@ -846,7 +846,7 @@ static int svq3_decode_mb(SVQ3Context *s, unsigned int mb_type)
/* decode prediction codes for luma blocks */
for (i = 0; i < 16; i += 2) {
vlc = svq3_get_ue_golomb(&s->gb_slice);
vlc = get_interleaved_ue_golomb(&s->gb_slice);
if (vlc >= 25U) {
av_log(s->avctx, AV_LOG_ERROR,
@ -924,7 +924,7 @@ static int svq3_decode_mb(SVQ3Context *s, unsigned int mb_type)
if (!IS_INTRA16x16(mb_type) &&
(!IS_SKIP(mb_type) || s->pict_type == AV_PICTURE_TYPE_B)) {
if ((vlc = svq3_get_ue_golomb(&s->gb_slice)) >= 48U){
if ((vlc = get_interleaved_ue_golomb(&s->gb_slice)) >= 48U){
av_log(s->avctx, AV_LOG_ERROR, "cbp_vlc=%"PRIu32"\n", vlc);
return -1;
}
@ -934,7 +934,7 @@ static int svq3_decode_mb(SVQ3Context *s, unsigned int mb_type)
}
if (IS_INTRA16x16(mb_type) ||
(s->pict_type != AV_PICTURE_TYPE_I && s->adaptive_quant && cbp)) {
s->qscale += svq3_get_se_golomb(&s->gb_slice);
s->qscale += get_interleaved_se_golomb(&s->gb_slice);
if (s->qscale > 31u) {
av_log(s->avctx, AV_LOG_ERROR, "qscale:%d\n", s->qscale);
@ -1052,7 +1052,7 @@ static int svq3_decode_slice_header(AVCodecContext *avctx)
skip_bits_long(&s->gb, slice_bytes * 8);
}
if ((slice_id = svq3_get_ue_golomb(&s->gb_slice)) >= 3) {
if ((slice_id = get_interleaved_ue_golomb(&s->gb_slice)) >= 3) {
av_log(s->avctx, AV_LOG_ERROR, "illegal slice type %u \n", slice_id);
return -1;
}
@ -1245,12 +1245,12 @@ static av_cold int svq3_decode_init(AVCodecContext *avctx)
avctx->has_b_frames = !s->low_delay;
if (s->has_watermark) {
#if CONFIG_ZLIB
unsigned watermark_width = svq3_get_ue_golomb(&gb);
unsigned watermark_height = svq3_get_ue_golomb(&gb);
int u1 = svq3_get_ue_golomb(&gb);
unsigned watermark_width = get_interleaved_ue_golomb(&gb);
unsigned watermark_height = get_interleaved_ue_golomb(&gb);
int u1 = get_interleaved_ue_golomb(&gb);
int u2 = get_bits(&gb, 8);
int u3 = get_bits(&gb, 2);
int u4 = svq3_get_ue_golomb(&gb);
int u4 = get_interleaved_ue_golomb(&gb);
unsigned long buf_len = watermark_width *
watermark_height * 4;
int offset = get_bits_count(&gb) + 7 >> 3;
@ -1542,7 +1542,7 @@ static int svq3_decode_frame(AVCodecContext *avctx, void *data,
/* TODO: support s->mb_skip_run */
}
mb_type = svq3_get_ue_golomb(&s->gb_slice);
mb_type = get_interleaved_ue_golomb(&s->gb_slice);
if (s->pict_type == AV_PICTURE_TYPE_I)
mb_type += 8;


@ -20,6 +20,7 @@
#include <string.h>
#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/log.h"
#include "libavutil/pixdesc.h"
@ -887,6 +888,122 @@ fail:
return err;
}
static av_cold int vaapi_encode_check_config(AVCodecContext *avctx)
{
VAAPIEncodeContext *ctx = avctx->priv_data;
VAStatus vas;
int i, n, err;
VAProfile *profiles = NULL;
VAEntrypoint *entrypoints = NULL;
VAConfigAttrib attr[] = {
{ VAConfigAttribRateControl },
{ VAConfigAttribEncMaxRefFrames },
};
n = vaMaxNumProfiles(ctx->hwctx->display);
profiles = av_malloc_array(n, sizeof(VAProfile));
if (!profiles) {
err = AVERROR(ENOMEM);
goto fail;
}
vas = vaQueryConfigProfiles(ctx->hwctx->display, profiles, &n);
if (vas != VA_STATUS_SUCCESS) {
av_log(ctx, AV_LOG_ERROR, "Failed to query profiles: %d (%s).\n",
vas, vaErrorStr(vas));
err = AVERROR(ENOSYS);
goto fail;
}
for (i = 0; i < n; i++) {
if (profiles[i] == ctx->va_profile)
break;
}
if (i >= n) {
av_log(ctx, AV_LOG_ERROR, "Encoding profile not found (%d).\n",
ctx->va_profile);
err = AVERROR(ENOSYS);
goto fail;
}
n = vaMaxNumEntrypoints(ctx->hwctx->display);
entrypoints = av_malloc_array(n, sizeof(VAEntrypoint));
if (!entrypoints) {
err = AVERROR(ENOMEM);
goto fail;
}
vas = vaQueryConfigEntrypoints(ctx->hwctx->display, ctx->va_profile,
entrypoints, &n);
if (vas != VA_STATUS_SUCCESS) {
av_log(ctx, AV_LOG_ERROR, "Failed to query entrypoints for "
"profile %u: %d (%s).\n", ctx->va_profile,
vas, vaErrorStr(vas));
err = AVERROR(ENOSYS);
goto fail;
}
for (i = 0; i < n; i++) {
if (entrypoints[i] == ctx->va_entrypoint)
break;
}
if (i >= n) {
av_log(ctx, AV_LOG_ERROR, "Encoding entrypoint not found "
"(%d / %d).\n", ctx->va_profile, ctx->va_entrypoint);
err = AVERROR(ENOSYS);
goto fail;
}
vas = vaGetConfigAttributes(ctx->hwctx->display,
ctx->va_profile, ctx->va_entrypoint,
attr, FF_ARRAY_ELEMS(attr));
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to fetch config "
"attributes: %d (%s).\n", vas, vaErrorStr(vas));
return AVERROR(EINVAL);
}
for (i = 0; i < FF_ARRAY_ELEMS(attr); i++) {
if (attr[i].value == VA_ATTRIB_NOT_SUPPORTED) {
// Unfortunately we have to treat this as "don't know" and hope
// for the best, because the Intel MJPEG encoder returns this
// for all the interesting attributes.
continue;
}
switch (attr[i].type) {
case VAConfigAttribRateControl:
if (!(ctx->va_rc_mode & attr[i].value)) {
av_log(avctx, AV_LOG_ERROR, "Rate control mode is not "
"supported: %x\n", attr[i].value);
err = AVERROR(EINVAL);
goto fail;
}
break;
case VAConfigAttribEncMaxRefFrames:
{
unsigned int ref_l0 = attr[i].value & 0xffff;
unsigned int ref_l1 = (attr[i].value >> 16) & 0xffff;
if (avctx->gop_size > 1 && ref_l0 < 1) {
av_log(avctx, AV_LOG_ERROR, "P frames are not "
"supported (%x).\n", attr[i].value);
err = AVERROR(EINVAL);
goto fail;
}
if (avctx->max_b_frames > 0 && ref_l1 < 1) {
av_log(avctx, AV_LOG_ERROR, "B frames are not "
"supported (%x).\n", attr[i].value);
err = AVERROR(EINVAL);
goto fail;
}
}
break;
}
}
err = 0;
fail:
av_freep(&profiles);
av_freep(&entrypoints);
return err;
}
av_cold int ff_vaapi_encode_init(AVCodecContext *avctx,
const VAAPIEncodeType *type)
{
@ -907,6 +1024,9 @@ av_cold int ff_vaapi_encode_init(AVCodecContext *avctx,
ctx->codec = type;
ctx->codec_options = ctx->codec_options_data;
ctx->va_config = VA_INVALID_ID;
ctx->va_context = VA_INVALID_ID;
ctx->priv_data = av_mallocz(type->priv_data_size);
if (!ctx->priv_data) {
err = AVERROR(ENOMEM);
@ -932,6 +1052,10 @@ av_cold int ff_vaapi_encode_init(AVCodecContext *avctx,
if (err < 0)
goto fail;
err = vaapi_encode_check_config(avctx);
if (err < 0)
goto fail;
vas = vaCreateConfig(ctx->hwctx->display,
ctx->va_profile, ctx->va_entrypoint,
ctx->config_attributes, ctx->nb_config_attributes,
@ -1088,11 +1212,15 @@ av_cold int ff_vaapi_encode_close(AVCodecContext *avctx)
vaapi_encode_free(avctx, pic);
}
if (ctx->va_context != VA_INVALID_ID)
if (ctx->va_context != VA_INVALID_ID) {
vaDestroyContext(ctx->hwctx->display, ctx->va_context);
ctx->va_context = VA_INVALID_ID;
}
if (ctx->va_config != VA_INVALID_ID)
if (ctx->va_config != VA_INVALID_ID) {
vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
ctx->va_config = VA_INVALID_ID;
}
if (ctx->codec->close)
ctx->codec->close(avctx);


@ -101,8 +101,8 @@ typedef struct VAAPIEncodeH264Context {
int fixed_qp_p;
int fixed_qp_b;
int next_frame_num;
int64_t idr_pic_count;
int64_t last_idr_frame;
// Rate control configuration.
struct {
@ -126,6 +126,7 @@ typedef struct VAAPIEncodeH264Context {
typedef struct VAAPIEncodeH264Options {
int qp;
int quality;
int low_power;
} VAAPIEncodeH264Options;
@ -592,12 +593,17 @@ static int vaapi_encode_h264_init_picture_params(AVCodecContext *avctx,
if (pic->type == PICTURE_TYPE_IDR) {
av_assert0(pic->display_order == pic->encode_order);
priv->last_idr_frame = pic->display_order;
vpic->frame_num = 0;
priv->next_frame_num = 1;
} else {
av_assert0(pic->display_order > priv->last_idr_frame);
vpic->frame_num = priv->next_frame_num;
if (pic->type != PICTURE_TYPE_B) {
// nal_ref_idc != 0
++priv->next_frame_num;
}
}
vpic->frame_num = (pic->encode_order - priv->last_idr_frame) &
vpic->frame_num = vpic->frame_num &
((1 << (4 + vseq->seq_fields.bits.log2_max_frame_num_minus4)) - 1);
vpic->CurrPic.picture_id = pic->recon_surface;
@ -608,10 +614,9 @@ static int vaapi_encode_h264_init_picture_params(AVCodecContext *avctx,
for (i = 0; i < pic->nb_refs; i++) {
VAAPIEncodePicture *ref = pic->refs[i];
av_assert0(ref && ref->encode_order >= priv->last_idr_frame);
av_assert0(ref && ref->encode_order < pic->encode_order);
vpic->ReferenceFrames[i].picture_id = ref->recon_surface;
vpic->ReferenceFrames[i].frame_idx =
ref->encode_order - priv->last_idr_frame;
vpic->ReferenceFrames[i].frame_idx = ref->encode_order;
vpic->ReferenceFrames[i].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
vpic->ReferenceFrames[i].TopFieldOrderCnt = ref->display_order;
vpic->ReferenceFrames[i].BottomFieldOrderCnt = ref->display_order;
@ -856,7 +861,17 @@ static av_cold int vaapi_encode_h264_init_internal(AVCodecContext *avctx)
avctx->profile);
return AVERROR(EINVAL);
}
ctx->va_entrypoint = VAEntrypointEncSlice;
if (opt->low_power) {
#if VA_CHECK_VERSION(0, 39, 1)
ctx->va_entrypoint = VAEntrypointEncSliceLP;
#else
av_log(avctx, AV_LOG_ERROR, "Low-power encoding is not "
"supported with this VAAPI version.\n");
return AVERROR(EINVAL);
#endif
} else {
ctx->va_entrypoint = VAEntrypointEncSlice;
}
ctx->input_width = avctx->width;
ctx->input_height = avctx->height;
@ -939,7 +954,10 @@ static const AVOption vaapi_encode_h264_options[] = {
{ "qp", "Constant QP (for P-frames; scaled by qfactor/qoffset for I/B)",
OFFSET(qp), AV_OPT_TYPE_INT, { .i64 = 20 }, 0, 52, FLAGS },
{ "quality", "Set encode quality (trades off against speed, higher is faster)",
OFFSET(quality), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 2, FLAGS },
OFFSET(quality), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 8, FLAGS },
{ "low_power", "Use low-power encoding mode (experimental: only supported "
"on some platforms, does not support all features)",
OFFSET(low_power), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },
{ NULL },
};


@ -28,8 +28,8 @@
#include "libavutil/version.h"
#define LIBAVCODEC_VERSION_MAJOR 57
#define LIBAVCODEC_VERSION_MINOR 47
#define LIBAVCODEC_VERSION_MICRO 100
#define LIBAVCODEC_VERSION_MINOR 48
#define LIBAVCODEC_VERSION_MICRO 101
#define LIBAVCODEC_VERSION_INT AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, \
LIBAVCODEC_VERSION_MINOR, \
@ -217,6 +217,12 @@
#ifndef FF_API_OLD_BSF
#define FF_API_OLD_BSF (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_COPY_CONTEXT
#define FF_API_COPY_CONTEXT (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_GET_CONTEXT_DEFAULTS
#define FF_API_GET_CONTEXT_DEFAULTS (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_NVENC_OLD_NAME
#define FF_API_NVENC_OLD_NAME (LIBAVCODEC_VERSION_MAJOR < 59)
#endif


@ -21,7 +21,6 @@
#include "config.h"
#include "libavutil/attributes.h"
#include "libavutil/cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavcodec/audiodsp.h"


@ -24,7 +24,6 @@
#include "libavutil/attributes.h"
#include "libavutil/cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavcodec/fmtconvert.h"


@ -24,7 +24,6 @@
#include "libavutil/attributes.h"
#include "libavutil/cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavcodec/avcodec.h"
#include "libavcodec/hpeldsp.h"


@ -19,7 +19,6 @@
#include "config.h"
#include "libavutil/attributes.h"
#include "libavutil/cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavcodec/mpegvideodsp.h"
#include "libavcodec/videodsp.h"


@ -21,7 +21,6 @@
#include "libavutil/attributes.h"
#include "libavutil/cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavcodec/rv34dsp.h"


@ -22,7 +22,6 @@
#include "libavutil/attributes.h"
#include "libavutil/cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavcodec/vp56dsp.h"


@ -23,7 +23,6 @@
#include "libavutil/attributes.h"
#include "libavutil/cpu.h"
#include "libavutil/mem.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavcodec/vp8dsp.h"

libavcodec/ylc.c (new file, 472 lines added)

@ -0,0 +1,472 @@
/*
* YUY2 Lossless Codec
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "libavutil/imgutils.h"
#include "libavutil/internal.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/mem.h"
#include "avcodec.h"
#include "bswapdsp.h"
#include "get_bits.h"
#include "huffyuvdsp.h"
#include "internal.h"
#include "unary.h"
typedef struct YLCContext {
VLC vlc[4];
uint32_t table[1024];
uint8_t *table_bits;
uint8_t *bitstream_bits;
int table_bits_size;
int bitstream_bits_size;
BswapDSPContext bdsp;
} YLCContext;
static av_cold int decode_init(AVCodecContext *avctx)
{
YLCContext *s = avctx->priv_data;
avctx->pix_fmt = AV_PIX_FMT_YUYV422;
ff_bswapdsp_init(&s->bdsp);
return 0;
}
typedef struct Node {
int16_t sym;
int16_t n0;
uint32_t count;
int16_t l, r;
} Node;
static void get_tree_codes(uint32_t *bits, int16_t *lens, uint8_t *xlat,
Node *nodes, int node,
uint32_t pfx, int pl, int *pos)
{
int s;
s = nodes[node].sym;
if (s != -1) {
bits[*pos] = (~pfx) & ((1 << FFMAX(pl, 1)) - 1);
lens[*pos] = FFMAX(pl, 1);
xlat[*pos] = s + (pl == 0);
(*pos)++;
} else {
pfx <<= 1;
pl++;
get_tree_codes(bits, lens, xlat, nodes, nodes[node].l, pfx, pl,
pos);
pfx |= 1;
get_tree_codes(bits, lens, xlat, nodes, nodes[node].r, pfx, pl,
pos);
}
}
static int build_vlc(AVCodecContext *avctx, VLC *vlc, const uint32_t *table)
{
Node nodes[512];
uint32_t bits[256];
int16_t lens[256];
uint8_t xlat[256];
int cur_node, i, j, pos = 0;
ff_free_vlc(vlc);
for (i = 0; i < 256; i++) {
nodes[i].count = table[i];
nodes[i].sym = i;
nodes[i].n0 = -2;
nodes[i].l = i;
nodes[i].r = i;
}
cur_node = 256;
j = 0;
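/* Huffman-style tree construction: repeatedly find the two smallest
 * non-zero symbol counts and merge them into a new internal node. */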
do {
for (i = 0; ; i++) {
int new_node = j;
int first_node = cur_node;
int second_node = cur_node;
int nd, st;
nodes[cur_node].count = -1;
do {
int val = nodes[new_node].count;
if (val && (val < nodes[first_node].count)) {
if (val >= nodes[second_node].count) {
first_node = new_node;
} else {
first_node = second_node;
second_node = new_node;
}
}
new_node += 1;
} while (new_node != cur_node);
if (first_node == cur_node)
break;
nd = nodes[second_node].count;
st = nodes[first_node].count;
nodes[second_node].count = 0;
nodes[first_node].count = 0;
nodes[cur_node].count = nd + st;
nodes[cur_node].sym = -1;
nodes[cur_node].n0 = cur_node;
nodes[cur_node].l = first_node;
nodes[cur_node].r = second_node;
cur_node++;
}
j++;
} while (cur_node - 256 == j);
get_tree_codes(bits, lens, xlat, nodes, cur_node - 1, 0, 0, &pos);
return ff_init_vlc_sparse(vlc, 10, pos, lens, 2, 2, bits, 4, 4, xlat, 1, 1, 0);
}
static const uint8_t table_y1[] = {
0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE,
0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE,
0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE,
0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE,
0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE,
0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x02, 0x02, 0x02, 0x02,
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
0x02, 0x00,
};
static const uint8_t table_u[] = {
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x01, 0x00,
};
static const uint8_t table_y2[] = {
0xFC, 0xFC, 0xFC, 0xFD, 0xFD, 0xFD, 0xFE, 0xFE,
0xFE, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFC,
0xFC, 0xFC, 0xFD, 0xFD, 0xFD, 0xFE, 0xFE, 0xFE,
0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFC, 0xFC,
0xFC, 0xFD, 0xFD, 0xFD, 0xFE, 0xFE, 0xFE, 0xFF,
0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFD, 0xFD, 0xFD,
0xFE, 0xFE, 0xFE, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
0x00, 0x01, 0x01, 0x01, 0xFD, 0xFD, 0xFD, 0xFE,
0xFE, 0xFE, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
0x01, 0x01, 0x01, 0xFD, 0xFD, 0xFD, 0xFE, 0xFE,
0xFE, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
0x01, 0x01, 0xFE, 0xFE, 0xFE, 0xFF, 0xFF, 0xFF,
0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x02, 0x02,
0x02, 0xFE, 0xFE, 0xFE, 0xFF, 0xFF, 0xFF, 0x00,
0x00, 0x00, 0x01, 0x01, 0x01, 0x02, 0x02, 0x02,
0xFE, 0xFE, 0xFE, 0xFF, 0xFF, 0xFF, 0x00, 0x00,
0x00, 0x01, 0x01, 0x01, 0x02, 0x02, 0x02, 0xFF,
0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01,
0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0xFF, 0xFF,
0xFF, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x02,
0x02, 0x02, 0x03, 0x03, 0x03, 0xFF, 0xFF, 0xFF,
0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x02, 0x02,
0x02, 0x03, 0x03, 0x03, 0x00, 0x00, 0x00, 0x01,
0x01, 0x01, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03,
0x04, 0x04, 0x04, 0x00, 0x00, 0x00, 0x01, 0x01,
0x01, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x04,
0x04, 0x04, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01,
0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x04, 0x04,
0x04, 0x00,
};
static const uint8_t table_v[] = {
0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00,
0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF,
0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01,
0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00,
0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF,
0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01,
0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00,
0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF,
0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01,
0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00,
0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF,
0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01,
0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00,
0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF,
0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01,
0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00,
0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF,
0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01,
0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00,
0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF,
0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01,
0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00,
0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF,
0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01,
0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00,
0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF,
0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01,
0xFF, 0x00, 0x01, 0xFF, 0x00, 0x01, 0xFF, 0x00,
0x01, 0x00,
};
static int decode_frame(AVCodecContext *avctx,
void *data, int *got_frame,
AVPacket *avpkt)
{
int TL[4] = { 128, 128, 128, 128 };
int L[4] = { 128, 128, 128, 128 };
YLCContext *s = avctx->priv_data;
const uint8_t *buf = avpkt->data;
int ret, x, y, toffset, boffset;
AVFrame * const p = data;
GetBitContext gb;
uint8_t *dst;
if (avpkt->size <= 16)
return AVERROR_INVALIDDATA;
if (AV_RL32(buf) != MKTAG('Y', 'L', 'C', '0') ||
AV_RL32(buf + 4) != 0)
return AVERROR_INVALIDDATA;
toffset = AV_RL32(buf + 8);
if (toffset < 16 || toffset >= avpkt->size)
return AVERROR_INVALIDDATA;
boffset = AV_RL32(buf + 12);
if (toffset >= boffset || boffset >= avpkt->size)
return AVERROR_INVALIDDATA;
if ((ret = ff_get_buffer(avctx, p, 0)) < 0)
return ret;
av_fast_malloc(&s->table_bits, &s->table_bits_size,
boffset - toffset + AV_INPUT_BUFFER_PADDING_SIZE);
if (!s->table_bits)
return AVERROR(ENOMEM);
memcpy(s->table_bits, avpkt->data + toffset, boffset - toffset);
memset(s->table_bits + boffset - toffset, 0, AV_INPUT_BUFFER_PADDING_SIZE);
s->bdsp.bswap_buf((uint32_t *) s->table_bits,
(uint32_t *) s->table_bits,
(boffset - toffset + 3) >> 2);
if ((ret = init_get_bits8(&gb, s->table_bits, boffset - toffset)) < 0)
return ret;
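/* Read the four 256-entry code tables: each value is stored as a
 * unary-coded length followed by that many extra bits. */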
for (x = 0; x < 1024; x++) {
unsigned len = get_unary(&gb, 1, 31);
uint32_t val = ((1U << len) - 1) + get_bits_long(&gb, len);
s->table[x] = val;
}
ret = build_vlc(avctx, &s->vlc[0], &s->table[0 ]);
if (ret < 0)
return ret;
ret = build_vlc(avctx, &s->vlc[1], &s->table[256]);
if (ret < 0)
return ret;
ret = build_vlc(avctx, &s->vlc[2], &s->table[512]);
if (ret < 0)
return ret;
ret = build_vlc(avctx, &s->vlc[3], &s->table[768]);
if (ret < 0)
return ret;
av_fast_malloc(&s->bitstream_bits, &s->bitstream_bits_size,
avpkt->size - boffset + AV_INPUT_BUFFER_PADDING_SIZE);
if (!s->bitstream_bits)
return AVERROR(ENOMEM);
memcpy(s->bitstream_bits, avpkt->data + boffset, avpkt->size - boffset);
memset(s->bitstream_bits + avpkt->size - boffset, 0, AV_INPUT_BUFFER_PADDING_SIZE);
s->bdsp.bswap_buf((uint32_t *) s->bitstream_bits,
(uint32_t *) s->bitstream_bits,
(avpkt->size - boffset) >> 2);
if ((ret = init_get_bits8(&gb, s->bitstream_bits, avpkt->size - boffset)) < 0)
return ret;
dst = p->data[0];
for (y = 0; y < avctx->height; y++) {
memset(dst, 0, avctx->width * 2);
dst += p->linesize[0];
}
dst = p->data[0];
for (y = 0; y < avctx->height; y++) {
for (x = 0; x < avctx->width * 2 && y < avctx->height;) {
if (get_bits_left(&gb) <= 0)
return AVERROR_INVALIDDATA;
if (get_bits1(&gb)) {
int val = get_vlc2(&gb, s->vlc[0].table, s->vlc[0].bits, 3);
if (val < 0) {
return AVERROR_INVALIDDATA;
} else if (val < 0xE1) {
dst[x ] = table_y1[val];
dst[x + 1] = table_u[val];
dst[x + 2] = table_y2[val];
dst[x + 3] = table_v[val];
x += 4;
} else {
int incr = (val - 0xDF) * 4;
if (x + incr >= avctx->width * 2) {
int iy = ((x + incr) / (avctx->width * 2));
x = (x + incr) % (avctx->width * 2);
y += iy;
dst += iy * p->linesize[0];
} else {
x += incr;
}
}
} else {
int y1, y2, u, v;
y1 = get_vlc2(&gb, s->vlc[1].table, s->vlc[1].bits, 3);
u = get_vlc2(&gb, s->vlc[2].table, s->vlc[2].bits, 3);
y2 = get_vlc2(&gb, s->vlc[1].table, s->vlc[1].bits, 3);
v = get_vlc2(&gb, s->vlc[3].table, s->vlc[3].bits, 3);
if (y1 < 0 || y2 < 0 || u < 0 || v < 0)
return AVERROR_INVALIDDATA;
dst[x ] = y1;
dst[x + 1] = u;
dst[x + 2] = y1 + y2;
dst[x + 3] = v;
x += 4;
}
}
dst += p->linesize[0];
}
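/* Undo the prediction: plain left prediction on the first line,
 * left + top - top-left gradient prediction on the remaining lines,
 * with separate predictors for Y, U and V. */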
dst = p->data[0];
for (x = 0; x < avctx->width * 2; x += 4) {
dst[x ] = dst[x ] + L[0];
dst[x + 2] = L[0] = dst[x + 2] + L[0];
L[1] = dst[x + 1] + L[1];
dst[x + 1] = L[1];
L[2] = dst[x + 3] + L[2];
dst[x + 3] = L[2];
}
dst += p->linesize[0];
for (y = 1; y < avctx->height; y++) {
x = 0;
dst[x ] = dst[x ] + L[0] + dst[x + 0 - p->linesize[0]] - TL[0];
dst[x + 2] = L[0] = dst[x + 2] + L[0] + dst[x + 2 - p->linesize[0]] - TL[0];
TL[0] = dst[x + 2 - p->linesize[0]];
L[1] = dst[x + 1] + L[1] + dst[x + 1 - p->linesize[0]] - TL[1];
dst[x + 1] = L[1];
TL[1] = dst[x + 1 - p->linesize[0]];
L[2] = dst[x + 3] + L[2] + dst[x + 3 - p->linesize[0]] - TL[2];
dst[x + 3] = L[2];
TL[2] = dst[x + 3 - p->linesize[0]];
for (x = 4; x < avctx->width * 2; x += 4) {
dst[x ] = dst[x ] + L[0] + dst[x + 0 - p->linesize[0]] - TL[0];
dst[x + 2] = L[0] = dst[x + 2] + L[0] + dst[x + 2 - p->linesize[0]] - TL[0];
TL[0] = dst[x + 2 - p->linesize[0]];
L[1] = dst[x + 1] + L[1] + dst[x + 1 - p->linesize[0]] - TL[1];
dst[x + 1] = L[1];
TL[1] = dst[x + 1 - p->linesize[0]];
L[2] = dst[x + 3] + L[2] + dst[x + 3 - p->linesize[0]] - TL[2];
dst[x + 3] = L[2];
TL[2] = dst[x + 3 - p->linesize[0]];
}
dst += p->linesize[0];
}
p->pict_type = AV_PICTURE_TYPE_I;
p->key_frame = 1;
*got_frame = 1;
return avpkt->size;
}
static av_cold int decode_end(AVCodecContext *avctx)
{
YLCContext *s = avctx->priv_data;
ff_free_vlc(&s->vlc[0]);
ff_free_vlc(&s->vlc[1]);
ff_free_vlc(&s->vlc[2]);
ff_free_vlc(&s->vlc[3]);
return 0;
}
AVCodec ff_ylc_decoder = {
.name = "ylc",
.long_name = NULL_IF_CONFIG_SMALL("YUY2 Lossless Codec"),
.type = AVMEDIA_TYPE_VIDEO,
.id = AV_CODEC_ID_YLC,
.priv_data_size = sizeof(YLCContext),
.init = decode_init,
.close = decode_end,
.decode = decode_frame,
.capabilities = AV_CODEC_CAP_DR1,
};


@ -22,7 +22,6 @@
#include "libavutil/attributes.h"
#include "libavutil/cpu.h"
#include "libavutil/mem.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavfilter/gradfun.h"


@ -22,7 +22,6 @@
#include "libavutil/cpu.h"
#include "libavutil/internal.h"
#include "libavutil/mem.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavfilter/interlace.h"


@ -21,7 +21,6 @@
#include "libavutil/attributes.h"
#include "libavutil/cpu.h"
#include "libavutil/mem.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavfilter/yadif.h"


@ -203,15 +203,15 @@
* avio_open2() or a custom one.
* - Unless the format is of the AVFMT_NOSTREAMS type, at least one stream must
* be created with the avformat_new_stream() function. The caller should fill
* the @ref AVStream.codec "stream codec context" information, such as the
* codec @ref AVCodecContext.codec_type "type", @ref AVCodecContext.codec_id
* the @ref AVStream.codecpar "stream codec parameters" information, such as the
* codec @ref AVCodecParameters.codec_type "type", @ref AVCodecParameters.codec_id
* "id" and other parameters (e.g. width / height, the pixel or sample format,
* etc.) as known. The @ref AVStream.time_base "stream timebase" should
* be set to the timebase that the caller desires to use for this stream (note
* that the timebase actually used by the muxer can be different, as will be
* described later).
* - It is advised to manually initialize only the relevant fields in
* AVCodecContext, rather than using @ref avcodec_copy_context() during
* AVCodecParameters, rather than using @ref avcodec_parameters_copy() during
* remuxing: there is no guarantee that the codec context values remain valid
* for both input and output format contexts.
* - The caller may fill in additional information, such as @ref

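The hunk above updates the muxing overview to reference AVStream.codecpar / AVCodecParameters instead of the deprecated per-stream AVCodecContext. A minimal sketch of the stream setup it describes follows; the output filename, codec id, dimensions and time base are placeholder values chosen for illustration, not taken from this commit:

#include <libavformat/avformat.h>

static int open_output(const char *filename)
{
    AVFormatContext *oc = NULL;
    AVStream *st;
    int ret;

    /* Allocate an output context; the muxer is guessed from the filename. */
    ret = avformat_alloc_output_context2(&oc, NULL, NULL, filename);
    if (ret < 0)
        return ret;

    /* Create one stream and fill only the relevant AVCodecParameters
     * fields, as the updated documentation recommends. */
    st = avformat_new_stream(oc, NULL);
    if (!st) {
        avformat_free_context(oc);
        return AVERROR(ENOMEM);
    }
    st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codecpar->codec_id   = AV_CODEC_ID_H264;     /* placeholder codec */
    st->codecpar->width      = 1280;                 /* placeholder size  */
    st->codecpar->height     = 720;
    st->time_base            = (AVRational){ 1, 25 }; /* desired timebase */

    if (!(oc->oformat->flags & AVFMT_NOFILE)) {
        ret = avio_open(&oc->pb, filename, AVIO_FLAG_WRITE);
        if (ret < 0) {
            avformat_free_context(oc);
            return ret;
        }
    }

    ret = avformat_write_header(oc, NULL);
    /* ... write packets, av_write_trailer(), then clean up ... */
    return ret;
}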

@ -252,7 +252,6 @@ static void dump_replaygain(void *ctx, AVPacketSideData *sd)
static void dump_stereo3d(void *ctx, AVPacketSideData *sd)
{
AVStereo3D *stereo;
const char *name;
if (sd->size < sizeof(*stereo)) {
av_log(ctx, AV_LOG_INFO, "invalid data");


@ -424,6 +424,7 @@ const AVCodecTag ff_codec_bmp_tags[] = {
{ AV_CODEC_ID_M101, MKTAG('M', '1', '0', '1') },
{ AV_CODEC_ID_M101, MKTAG('M', '1', '0', '2') },
{ AV_CODEC_ID_MAGICYUV, MKTAG('M', 'A', 'G', 'Y') },
{ AV_CODEC_ID_YLC, MKTAG('Y', 'L', 'C', '0') },
{ AV_CODEC_ID_NONE, 0 }
};


@ -60,19 +60,19 @@ static void send_picture(AVFormatContext *ctx, const uint8_t *buf, int size, int
second_field = interlaced && (pic_nr & 0x01);
init_get_bits(&gc, buf, 8 * size);
svq3_get_ue_golomb(&gc); /* wavelet_idx */
wavelet_depth = svq3_get_ue_golomb(&gc);
svq3_get_ue_golomb(&gc); /* num_x */
svq3_get_ue_golomb(&gc); /* num_y */
prefix_bytes = svq3_get_ue_golomb(&gc);
size_scaler = svq3_get_ue_golomb(&gc);
get_interleaved_ue_golomb(&gc); /* wavelet_idx */
wavelet_depth = get_interleaved_ue_golomb(&gc);
get_interleaved_ue_golomb(&gc); /* num_x */
get_interleaved_ue_golomb(&gc); /* num_y */
prefix_bytes = get_interleaved_ue_golomb(&gc);
size_scaler = get_interleaved_ue_golomb(&gc);
/* pass the quantization matrices */
svq3_get_ue_golomb(&gc);
get_interleaved_ue_golomb(&gc);
for(lvl = 0; lvl < wavelet_depth; lvl++)
{
svq3_get_ue_golomb(&gc);
svq3_get_ue_golomb(&gc);
svq3_get_ue_golomb(&gc);
get_interleaved_ue_golomb(&gc);
get_interleaved_ue_golomb(&gc);
get_interleaved_ue_golomb(&gc);
}
frag_len = (get_bits_count(&gc) + 7) / 8; /* length of transform parameters */


@ -248,7 +248,7 @@ static int sdp_parse_rtpmap(AVFormatContext *s,
AVCodecParameters *par = st->codecpar;
char buf[256];
int i;
AVCodec *c;
const AVCodecDescriptor *desc;
const char *c_name;
/* See if we can handle this kind of payload.
@ -274,9 +274,9 @@ static int sdp_parse_rtpmap(AVFormatContext *s,
par->codec_id = ff_rtp_codec_id(buf, par->codec_type);
}
c = avcodec_find_decoder(par->codec_id);
if (c && c->name)
c_name = c->name;
desc = avcodec_descriptor_get(par->codec_id);
if (desc && desc->name)
c_name = desc->name;
else
c_name = "(null)";


@ -452,3 +452,39 @@ void av_hwframe_constraints_free(AVHWFramesConstraints **constraints)
}
av_freep(constraints);
}
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type,
const char *device, AVDictionary *opts, int flags)
{
AVBufferRef *device_ref = NULL;
AVHWDeviceContext *device_ctx;
int ret = 0;
device_ref = av_hwdevice_ctx_alloc(type);
if (!device_ref) {
ret = AVERROR(ENOMEM);
goto fail;
}
device_ctx = (AVHWDeviceContext*)device_ref->data;
if (!device_ctx->internal->hw_type->device_create) {
ret = AVERROR(ENOSYS);
goto fail;
}
ret = device_ctx->internal->hw_type->device_create(device_ctx, device,
opts, flags);
if (ret < 0)
goto fail;
ret = av_hwdevice_ctx_init(device_ref);
if (ret < 0)
goto fail;
*pdevice_ref = device_ref;
return 0;
fail:
av_buffer_unref(&device_ref);
*pdevice_ref = NULL;
return ret;
}


@ -241,6 +241,34 @@ AVBufferRef *av_hwdevice_ctx_alloc(enum AVHWDeviceType type);
*/
int av_hwdevice_ctx_init(AVBufferRef *ref);
/**
* Open a device of the specified type and create an AVHWDeviceContext for it.
*
* This is a convenience function intended to cover the simple cases. Callers
* who need to fine-tune device creation/management should open the device
* manually and then wrap it in an AVHWDeviceContext using
* av_hwdevice_ctx_alloc()/av_hwdevice_ctx_init().
*
* The returned context is already initialized and ready for use, the caller
* should not call av_hwdevice_ctx_init() on it. The user_opaque/free fields of
* the created AVHWDeviceContext are set by this function and should not be
* touched by the caller.
*
* @param device_ctx On success, a reference to the newly-created device context
* will be written here. The reference is owned by the caller
* and must be released with av_buffer_unref() when no longer
* needed. On failure, NULL will be written to this pointer.
* @param type The type of the device to create.
* @param device A type-specific string identifying the device to open.
* @param opts A dictionary of additional (type-specific) options to use in
* opening the device. The dictionary remains owned by the caller.
* @param flags currently unused
*
* @return 0 on success, a negative AVERROR code on failure.
*/
int av_hwdevice_ctx_create(AVBufferRef **device_ctx, enum AVHWDeviceType type,
const char *device, AVDictionary *opts, int flags);
/**
* Allocate an AVHWFramesContext tied to a given device context.
*

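The new av_hwdevice_ctx_create() documented above replaces the manual alloc/init sequence for the common case. A short usage sketch, assuming a VAAPI device; the device type and the "/dev/dri/renderD128" path are illustrative choices, not part of the commit:

#include <libavutil/hwcontext.h>

static AVBufferRef *open_hw_device(void)
{
    AVBufferRef *device_ref = NULL;
    int ret;

    /* Open the device and get back a fully initialized AVHWDeviceContext;
     * no separate call to av_hwdevice_ctx_init() is needed afterwards. */
    ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VAAPI,
                                 "/dev/dri/renderD128", NULL, 0);
    if (ret < 0)
        return NULL;

    /* The reference is owned by the caller and must be released with
     * av_buffer_unref() when no longer needed. */
    return device_ref;
}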

@ -253,6 +253,49 @@ static int cuda_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
return 0;
}
static void cuda_device_free(AVHWDeviceContext *ctx)
{
AVCUDADeviceContext *hwctx = ctx->hwctx;
cuCtxDestroy(hwctx->cuda_ctx);
}
static int cuda_device_create(AVHWDeviceContext *ctx, const char *device,
AVDictionary *opts, int flags)
{
AVCUDADeviceContext *hwctx = ctx->hwctx;
CUdevice cu_device;
CUcontext dummy;
CUresult err;
int device_idx = 0;
if (device)
device_idx = strtol(device, NULL, 0);
err = cuInit(0);
if (err != CUDA_SUCCESS) {
av_log(ctx, AV_LOG_ERROR, "Could not initialize the CUDA driver API\n");
return AVERROR_UNKNOWN;
}
err = cuDeviceGet(&cu_device, device_idx);
if (err != CUDA_SUCCESS) {
av_log(ctx, AV_LOG_ERROR, "Could not get the device number %d\n", device_idx);
return AVERROR_UNKNOWN;
}
err = cuCtxCreate(&hwctx->cuda_ctx, 0, cu_device);
if (err != CUDA_SUCCESS) {
av_log(ctx, AV_LOG_ERROR, "Error creating a CUDA context\n");
return AVERROR_UNKNOWN;
}
cuCtxPopCurrent(&dummy);
ctx->free = cuda_device_free;
return 0;
}
const HWContextType ff_hwcontext_type_cuda = {
.type = AV_HWDEVICE_TYPE_CUDA,
.name = "CUDA",
@ -260,6 +303,7 @@ const HWContextType ff_hwcontext_type_cuda = {
.device_hwctx_size = sizeof(AVCUDADeviceContext),
.frames_priv_size = sizeof(CUDAFramesContext),
.device_create = cuda_device_create,
.frames_init = cuda_frames_init,
.frames_get_buffer = cuda_get_buffer,
.transfer_get_formats = cuda_transfer_get_formats,


@ -38,6 +38,9 @@
#include "pixdesc.h"
#include "pixfmt.h"
typedef IDirect3D9* WINAPI pDirect3DCreate9(UINT);
typedef HRESULT WINAPI pCreateDeviceManager9(UINT *, IDirect3DDeviceManager9 **);
typedef struct DXVA2FramesContext {
IDirect3DSurface9 **surfaces_internal;
int nb_surfaces_used;
@ -48,6 +51,16 @@ typedef struct DXVA2FramesContext {
D3DFORMAT format;
} DXVA2FramesContext;
typedef struct DXVA2DevicePriv {
HMODULE d3dlib;
HMODULE dxva2lib;
HANDLE device_handle;
IDirect3D9 *d3d9;
IDirect3DDevice9 *d3d9device;
} DXVA2DevicePriv;
static const struct {
D3DFORMAT d3d_format;
enum AVPixelFormat pix_fmt;
@ -287,6 +300,125 @@ static int dxva2_transfer_data(AVHWFramesContext *ctx, AVFrame *dst,
return 0;
}
static void dxva2_device_free(AVHWDeviceContext *ctx)
{
AVDXVA2DeviceContext *hwctx = ctx->hwctx;
DXVA2DevicePriv *priv = ctx->user_opaque;
if (hwctx->devmgr && priv->device_handle != INVALID_HANDLE_VALUE)
IDirect3DDeviceManager9_CloseDeviceHandle(hwctx->devmgr, priv->device_handle);
if (hwctx->devmgr)
IDirect3DDeviceManager9_Release(hwctx->devmgr);
if (priv->d3d9device)
IDirect3DDevice9_Release(priv->d3d9device);
if (priv->d3d9)
IDirect3D9_Release(priv->d3d9);
if (priv->d3dlib)
FreeLibrary(priv->d3dlib);
if (priv->dxva2lib)
FreeLibrary(priv->dxva2lib);
av_freep(&ctx->user_opaque);
}
static int dxva2_device_create(AVHWDeviceContext *ctx, const char *device,
AVDictionary *opts, int flags)
{
AVDXVA2DeviceContext *hwctx = ctx->hwctx;
DXVA2DevicePriv *priv;
pDirect3DCreate9 *createD3D = NULL;
pCreateDeviceManager9 *createDeviceManager = NULL;
D3DPRESENT_PARAMETERS d3dpp = {0};
D3DDISPLAYMODE d3ddm;
unsigned resetToken = 0;
UINT adapter = D3DADAPTER_DEFAULT;
HRESULT hr;
if (device)
adapter = atoi(device);
priv = av_mallocz(sizeof(*priv));
if (!priv)
return AVERROR(ENOMEM);
ctx->user_opaque = priv;
ctx->free = dxva2_device_free;
priv->device_handle = INVALID_HANDLE_VALUE;
priv->d3dlib = LoadLibrary("d3d9.dll");
if (!priv->d3dlib) {
av_log(ctx, AV_LOG_ERROR, "Failed to load D3D9 library\n");
return AVERROR_UNKNOWN;
}
priv->dxva2lib = LoadLibrary("dxva2.dll");
if (!priv->dxva2lib) {
av_log(ctx, AV_LOG_ERROR, "Failed to load DXVA2 library\n");
return AVERROR_UNKNOWN;
}
createD3D = (pDirect3DCreate9 *)GetProcAddress(priv->d3dlib, "Direct3DCreate9");
if (!createD3D) {
av_log(ctx, AV_LOG_ERROR, "Failed to locate Direct3DCreate9\n");
return AVERROR_UNKNOWN;
}
createDeviceManager = (pCreateDeviceManager9 *)GetProcAddress(priv->dxva2lib,
"DXVA2CreateDirect3DDeviceManager9");
if (!createDeviceManager) {
av_log(ctx, AV_LOG_ERROR, "Failed to locate DXVA2CreateDirect3DDeviceManager9\n");
return AVERROR_UNKNOWN;
}
priv->d3d9 = createD3D(D3D_SDK_VERSION);
if (!priv->d3d9) {
av_log(ctx, AV_LOG_ERROR, "Failed to create IDirect3D object\n");
return AVERROR_UNKNOWN;
}
IDirect3D9_GetAdapterDisplayMode(priv->d3d9, adapter, &d3ddm);
d3dpp.Windowed = TRUE;
d3dpp.BackBufferWidth = 640;
d3dpp.BackBufferHeight = 480;
d3dpp.BackBufferCount = 0;
d3dpp.BackBufferFormat = d3ddm.Format;
d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
d3dpp.Flags = D3DPRESENTFLAG_VIDEO;
hr = IDirect3D9_CreateDevice(priv->d3d9, adapter, D3DDEVTYPE_HAL, GetDesktopWindow(),
D3DCREATE_SOFTWARE_VERTEXPROCESSING | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE,
&d3dpp, &priv->d3d9device);
if (FAILED(hr)) {
av_log(ctx, AV_LOG_ERROR, "Failed to create Direct3D device\n");
return AVERROR_UNKNOWN;
}
hr = createDeviceManager(&resetToken, &hwctx->devmgr);
if (FAILED(hr)) {
av_log(ctx, AV_LOG_ERROR, "Failed to create Direct3D device manager\n");
return AVERROR_UNKNOWN;
}
hr = IDirect3DDeviceManager9_ResetDevice(hwctx->devmgr, priv->d3d9device, resetToken);
if (FAILED(hr)) {
av_log(ctx, AV_LOG_ERROR, "Failed to bind Direct3D device to device manager\n");
return AVERROR_UNKNOWN;
}
hr = IDirect3DDeviceManager9_OpenDeviceHandle(hwctx->devmgr, &priv->device_handle);
if (FAILED(hr)) {
av_log(ctx, AV_LOG_ERROR, "Failed to open device handle\n");
return AVERROR_UNKNOWN;
}
return 0;
}
const HWContextType ff_hwcontext_type_dxva2 = {
.type = AV_HWDEVICE_TYPE_DXVA2,
.name = "DXVA2",
@ -295,6 +427,7 @@ const HWContextType ff_hwcontext_type_dxva2 = {
.frames_hwctx_size = sizeof(AVDXVA2FramesContext),
.frames_priv_size = sizeof(DXVA2FramesContext),
.device_create = dxva2_device_create,
.frames_init = dxva2_frames_init,
.frames_uninit = dxva2_frames_uninit,
.frames_get_buffer = dxva2_get_buffer,


@ -64,6 +64,9 @@ typedef struct HWContextType {
*/
size_t frames_priv_size;
int (*device_create)(AVHWDeviceContext *ctx, const char *device,
AVDictionary *opts, int flags);
int (*device_init)(AVHWDeviceContext *ctx);
void (*device_uninit)(AVHWDeviceContext *ctx);


@ -16,6 +16,21 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "config.h"
#if HAVE_VAAPI_X11
# include <va/va_x11.h>
#endif
#if HAVE_VAAPI_DRM
# include <va/va_drm.h>
#endif
#include <fcntl.h>
#if HAVE_UNISTD_H
# include <unistd.h>
#endif
#include "avassert.h"
#include "buffer.h"
#include "common.h"
@ -26,6 +41,14 @@
#include "pixdesc.h"
#include "pixfmt.h"
typedef struct VAAPIDevicePriv {
#if HAVE_VAAPI_X11
Display *x11_display;
#endif
int drm_fd;
} VAAPIDevicePriv;
typedef struct VAAPISurfaceFormat {
enum AVPixelFormat pix_fmt;
VAImageFormat image_format;
@ -823,6 +846,105 @@ fail:
return err;
}
static void vaapi_device_free(AVHWDeviceContext *ctx)
{
AVVAAPIDeviceContext *hwctx = ctx->hwctx;
VAAPIDevicePriv *priv = ctx->user_opaque;
if (hwctx->display)
vaTerminate(hwctx->display);
#if HAVE_VAAPI_X11
if (priv->x11_display)
XCloseDisplay(priv->x11_display);
#endif
if (priv->drm_fd >= 0)
close(priv->drm_fd);
av_freep(&priv);
}
static int vaapi_device_create(AVHWDeviceContext *ctx, const char *device,
AVDictionary *opts, int flags)
{
AVVAAPIDeviceContext *hwctx = ctx->hwctx;
VAAPIDevicePriv *priv;
VADisplay display = 0;
VAStatus vas;
int major, minor;
priv = av_mallocz(sizeof(*priv));
if (!priv)
return AVERROR(ENOMEM);
priv->drm_fd = -1;
ctx->user_opaque = priv;
ctx->free = vaapi_device_free;
#if HAVE_VAAPI_X11
if (!display && !(device && device[0] == '/')) {
// Try to open the device as an X11 display.
priv->x11_display = XOpenDisplay(device);
if (!priv->x11_display) {
av_log(ctx, AV_LOG_VERBOSE, "Cannot open X11 display "
"%s.\n", XDisplayName(device));
} else {
display = vaGetDisplay(priv->x11_display);
if (!display) {
av_log(ctx, AV_LOG_ERROR, "Cannot open a VA display "
"from X11 display %s.\n", XDisplayName(device));
return AVERROR_UNKNOWN;
}
av_log(ctx, AV_LOG_VERBOSE, "Opened VA display via "
"X11 display %s.\n", XDisplayName(device));
}
}
#endif
#if HAVE_VAAPI_DRM
if (!display && device) {
// Try to open the device as a DRM path.
priv->drm_fd = open(device, O_RDWR);
if (priv->drm_fd < 0) {
av_log(ctx, AV_LOG_VERBOSE, "Cannot open DRM device %s.\n",
device);
} else {
display = vaGetDisplayDRM(priv->drm_fd);
if (!display) {
av_log(ctx, AV_LOG_ERROR, "Cannot open a VA display "
"from DRM device %s.\n", device);
return AVERROR_UNKNOWN;
}
av_log(ctx, AV_LOG_VERBOSE, "Opened VA display via "
"DRM device %s.\n", device);
}
}
#endif
if (!display) {
av_log(ctx, AV_LOG_ERROR, "No VA display found for "
"device: %s.\n", device ? device : "");
return AVERROR(EINVAL);
}
hwctx->display = display;
vas = vaInitialize(display, &major, &minor);
if (vas != VA_STATUS_SUCCESS) {
av_log(ctx, AV_LOG_ERROR, "Failed to initialise VAAPI "
"connection: %d (%s).\n", vas, vaErrorStr(vas));
return AVERROR(EIO);
}
av_log(ctx, AV_LOG_VERBOSE, "Initialised VAAPI connection: "
"version %d.%d\n", major, minor);
return 0;
}
const HWContextType ff_hwcontext_type_vaapi = {
.type = AV_HWDEVICE_TYPE_VAAPI,
.name = "VAAPI",
@ -833,6 +955,7 @@ const HWContextType ff_hwcontext_type_vaapi = {
.frames_hwctx_size = sizeof(AVVAAPIFramesContext),
.frames_priv_size = sizeof(VAAPIFramesContext),
.device_create = &vaapi_device_create,
.device_init = &vaapi_device_init,
.device_uninit = &vaapi_device_uninit,
.frames_get_constraints = &vaapi_frames_get_constraints,


@ -16,6 +16,8 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "config.h"
#include <stdint.h>
#include <string.h>
@ -388,6 +390,82 @@ static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
return 0;
}
#if HAVE_VDPAU_X11
#include <vdpau/vdpau_x11.h>
#include <X11/Xlib.h>
typedef struct VDPAUDevicePriv {
VdpDeviceDestroy *device_destroy;
Display *dpy;
} VDPAUDevicePriv;
static void vdpau_device_free(AVHWDeviceContext *ctx)
{
AVVDPAUDeviceContext *hwctx = ctx->hwctx;
VDPAUDevicePriv *priv = ctx->user_opaque;
if (priv->device_destroy)
priv->device_destroy(hwctx->device);
if (priv->dpy)
XCloseDisplay(priv->dpy);
av_freep(&priv);
}
static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
AVDictionary *opts, int flags)
{
AVVDPAUDeviceContext *hwctx = ctx->hwctx;
VDPAUDevicePriv *priv;
VdpStatus err;
VdpGetInformationString *get_information_string;
const char *display, *vendor;
priv = av_mallocz(sizeof(*priv));
if (!priv)
return AVERROR(ENOMEM);
ctx->user_opaque = priv;
ctx->free = vdpau_device_free;
priv->dpy = XOpenDisplay(device);
if (!priv->dpy) {
av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
XDisplayName(device));
return AVERROR_UNKNOWN;
}
display = XDisplayString(priv->dpy);
err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
&hwctx->device, &hwctx->get_proc_address);
if (err != VDP_STATUS_OK) {
av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
display);
return AVERROR_UNKNOWN;
}
#define GET_CALLBACK(id, result) \
do { \
void *tmp; \
err = hwctx->get_proc_address(hwctx->device, id, &tmp); \
if (err != VDP_STATUS_OK) { \
av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n"); \
return AVERROR_UNKNOWN; \
} \
result = tmp; \
} while (0)
GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY, priv->device_destroy);
get_information_string(&vendor);
av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
"X11 display %s\n", vendor, display);
return 0;
}
#endif
const HWContextType ff_hwcontext_type_vdpau = {
.type = AV_HWDEVICE_TYPE_VDPAU,
.name = "VDPAU",
@ -396,6 +474,9 @@ const HWContextType ff_hwcontext_type_vdpau = {
.device_priv_size = sizeof(VDPAUDeviceContext),
.frames_priv_size = sizeof(VDPAUFramesContext),
#if HAVE_VDPAU_X11
.device_create = vdpau_device_create,
#endif
.device_init = vdpau_device_init,
.device_uninit = vdpau_device_uninit,
.frames_init = vdpau_frames_init,


@ -64,7 +64,7 @@
*/
#define LIBAVUTIL_VERSION_MAJOR 55
#define LIBAVUTIL_VERSION_MINOR 26
#define LIBAVUTIL_VERSION_MINOR 27
#define LIBAVUTIL_VERSION_MICRO 100
#define LIBAVUTIL_VERSION_INT AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, \


@ -27,7 +27,6 @@
#include "config.h"
#include "libavutil/attributes.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavutil/cpu.h"
#include "libavutil/bswap.h"


@ -25,7 +25,6 @@
#include "libavutil/attributes.h"
#include "libavutil/avassert.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/x86/asm.h"
#include "libavutil/x86/cpu.h"
#include "libavutil/cpu.h"
#include "libavutil/pixdesc.h"


@ -1,7 +1,7 @@
FATE_SAMPLES_DEMUX-$(call DEMDEC, AVI, FRAPS) += fate-avio-direct
fate-avio-direct: CMD = framecrc -avioflags direct -i $(TARGET_SAMPLES)/fraps/fraps-v5-bouncing-balls-partial.avi -avioflags direct
FATE_SAMPLES_DEMUX-$(CONFIG_AAC_DEMUXER) += fate-adts-demux
FATE_SAMPLES_DEMUX-$(call DEMDEC, AAC, AAC) += fate-adts-demux
fate-adts-demux: CMD = crc -i $(TARGET_SAMPLES)/aac/ct_faac-adts.aac -acodec copy
FATE_SAMPLES_DEMUX-$(CONFIG_AEA_DEMUXER) += fate-aea-demux
@ -41,13 +41,13 @@ fate-flv-demux: CMD = framecrc -i $(TARGET_SAMPLES)/flv/Enigma_Principles_of_Lus
FATE_SAMPLES_DEMUX-$(CONFIG_GIF_DEMUXER) += fate-gif-demux
fate-gif-demux: CMD = framecrc -i $(TARGET_SAMPLES)/gif/Newtons_cradle_animation_book_2.gif -vcodec copy
FATE_SAMPLES_DEMUX-$(CONFIG_IV8_DEMUXER) += fate-iv8-demux
FATE_SAMPLES_DEMUX-$(call ALLYES, IV8_DEMUXER MPEG4VIDEO_PARSER) += fate-iv8-demux
fate-iv8-demux: CMD = framecrc -i $(TARGET_SAMPLES)/iv8/zzz-partial.mpg -vcodec copy
FATE_SAMPLES_DEMUX-$(CONFIG_JV_DEMUXER) += fate-jv-demux
fate-jv-demux: CMD = framecrc -i $(TARGET_SAMPLES)/jv/intro.jv -vcodec copy -acodec copy
FATE_SAMPLES_DEMUX-$(CONFIG_LMLM4_DEMUXER) += fate-lmlm4-demux
FATE_SAMPLES_DEMUX-$(call ALLYES, LMLM4_DEMUXER MPEG4VIDEO_PARSER) += fate-lmlm4-demux
fate-lmlm4-demux: CMD = framecrc -i $(TARGET_SAMPLES)/lmlm4/LMLM4_CIFat30fps.divx -t 3 -acodec copy -vcodec copy
FATE_SAMPLES_DEMUX-$(CONFIG_XA_DEMUXER) += fate-maxis-xa
@ -72,10 +72,10 @@ fate-ts-opus-demux: CMD = framecrc -i $(TARGET_SAMPLES)/opus/test-8-7.1.opus-sma
FATE_SAMPLES_DEMUX-$(CONFIG_MTV_DEMUXER) += fate-mtv
fate-mtv: CMD = framecrc -i $(TARGET_SAMPLES)/mtv/comedian_auto-partial.mtv -c copy
FATE_SAMPLES_DEMUX-$(CONFIG_MXF_DEMUXER) += fate-mxf-demux
FATE_SAMPLES_DEMUX-$(call DEMDEC, MXF, MPEG4) += fate-mxf-demux
fate-mxf-demux: CMD = framecrc -i $(TARGET_SAMPLES)/mxf/C0023S01.mxf -acodec copy -vcodec copy
FATE_SAMPLES_DEMUX-$(CONFIG_NC_DEMUXER) += fate-nc-demux
FATE_SAMPLES_DEMUX-$(call ALLYES, NC_DEMUXER MPEG4VIDEO_PARSER) += fate-nc-demux
fate-nc-demux: CMD = framecrc -i $(TARGET_SAMPLES)/nc-camera/nc-sample-partial -vcodec copy
FATE_SAMPLES_DEMUX-$(CONFIG_NISTSPHERE_DEMUXER) += fate-nistsphere-demux
@ -129,7 +129,7 @@ fate-wav-ac3: CMD = framecrc -i $(TARGET_SAMPLES)/ac3/diatonis_invisible_order_a
FATE_SAMPLES_DEMUX-$(CONFIG_WSAUD_DEMUXER) += fate-westwood-aud
fate-westwood-aud: CMD = framecrc -i $(TARGET_SAMPLES)/westwood-aud/excellent.aud -c copy
FATE_SAMPLES_DEMUX-$(CONFIG_WTV_DEMUXER) += fate-wtv-demux
FATE_SAMPLES_DEMUX-$(call ALLYES, WTV_DEMUXER MPEGVIDEO_PARSER) += fate-wtv-demux
fate-wtv-demux: CMD = framecrc -i $(TARGET_SAMPLES)/wtv/law-and-order-partial.wtv -vcodec copy -acodec copy
FATE_SAMPLES_DEMUX-$(CONFIG_XMV_DEMUXER) += fate-xmv-demux


@ -3,7 +3,7 @@ FATE_SCREEN-$(call DEMDEC, AVI, CSCD) += fate-cscd
fate-cscd: CMD = framecrc -i $(TARGET_SAMPLES)/CSCD/sample_video.avi -an -pix_fmt rgb24
FATE_SCREEN-$(call DEMDEC, AVI, DXTORY) += fate-dxtory
fate-dxtory: CMD = framecrc -i $(TARGET_SAMPLES)/dxtory/dxtory_mic.avi
fate-dxtory: CMD = framecrc -i $(TARGET_SAMPLES)/dxtory/dxtory_mic.avi -an
FATE_SAMPLES_AVCONV-$(call DEMDEC, AVI, FIC) += fate-fic-avi
fate-fic-avi: CMD = framecrc -i $(TARGET_SAMPLES)/fic/fic-partial-2MB.avi -an


@ -7,7 +7,7 @@ fate-g722-encode: SRC = tests/data/asynth-16000-1.wav
fate-g722-encode: CMD = enc_dec_pcm wav framemd5 s16le $(SRC) -c:a g722
FATE_VOICE-yes += $(FATE_G722-yes)
fate-g722: $(FATE_G722)
fate-g722: $(FATE_G722-yes)
FATE_G723_1 += fate-g723_1-dec-1
fate-g723_1-dec-1: CMD = framecrc -postfilter 0 -i $(TARGET_SAMPLES)/g723_1/ineqd53.tco
@ -33,8 +33,7 @@ fate-g723_1-dec-7: CMD = framecrc -postfilter 1 -i $(TARGET_SAMPLES)/g723_1/dtx6
FATE_G723_1 += fate-g723_1-dec-8
fate-g723_1-dec-8: CMD = framecrc -postfilter 1 -i $(TARGET_SAMPLES)/g723_1/dtx63e.tco
FATE_G723_1-$(call DEMDEC, G723_1, G723_1) += $(FATE_G723_1)
FATE_SAMPLES_AVCONV += $(FATE_G723_1-yes)
FATE_VOICE-$(call DEMDEC, G723_1, G723_1) += $(FATE_G723_1)
fate-g723_1: $(FATE_G723_1)
FATE_G726 += fate-g726-encode-2bit
@ -62,7 +61,7 @@ FATE_GSM-$(call DEMDEC, MOV, GSM) += fate-gsm-toast
fate-gsm-toast: CMD = framecrc -i $(TARGET_SAMPLES)/gsm/sample-gsm-8000.mov -t 10
FATE_VOICE-yes += $(FATE_GSM-yes)
fate-gsm: $(FATE_GSM)
fate-gsm: $(FATE_GSM-yes)
FATE_VOICE-$(call DEMDEC, QCP, QCELP) += fate-qcelp
fate-qcelp: CMD = pcm -i $(TARGET_SAMPLES)/qcp/0036580847.QCP


@ -3,9 +3,4 @@
#codec_id 0: rawvideo
#dimensions 0: 1280x720
#sar 0: 0/1
#tb 1: 1/48000
#media_type 1: audio
#codec_id 1: pcm_s16le
#sample_rate 1: 48000
#channel_layout 1: 3
0, 0, 0, 1, 1382400, 0x44373645