
lavu: add new D3D11 pixfmt and hwcontext

To be used with the new d3d11 hwaccel decode API.

With the new hwaccel API, we don't want surfaces to depend on the
decoder (other than the required dimensions and format). The old D3D11VA
pixfmt uses ID3D11VideoDecoderOutputView pointers, which are tied to the
decoder configuration and are thus incompatible with the new hwaccel
API. This patch introduces AV_PIX_FMT_D3D11, which uses an ID3D11Texture2D
plus an array index. It's simpler and compatible with the new hwaccel API.
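
For illustration, a consumer of the new pixfmt unpacks a frame roughly as
follows (a minimal sketch, not part of this commit; the helper name is
hypothetical):

    #include "libavutil/frame.h"
    #include "libavutil/hwcontext_d3d11va.h"

    /* Unpack the texture/index pair carried by an AV_PIX_FMT_D3D11 frame:
     * data[0] is the ID3D11Texture2D, data[1] the array slice index
     * (always 0 for a non-array texture). */
    static void unpack_d3d11_frame(const AVFrame *frame,
                                   ID3D11Texture2D **texture, intptr_t *index)
    {
        *texture = (ID3D11Texture2D *)frame->data[0];
        *index   = (intptr_t)frame->data[1];
    }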

The introduced hwcontext supports only the new pixfmt.

Frame upload code untested.

Significantly based on work by Steve Lhomme <robux4@gmail.com>, but with
heavy changes/rewrites.

Merges Libav commit fff90422d1.

Signed-off-by: Diego Biurrun <diego@biurrun.de>
wm4 2017-06-06 18:51:07 +02:00
parent 4d62ee6746
commit 3303511f33
10 changed files with 680 additions and 2 deletions

doc/APIchanges

@ -15,6 +15,9 @@ libavutil: 2015-08-28
API changes, most recent first:
2017-xx-xx - xxxxxxx - lavu 55.67.100 - hwcontext.h
Add AV_HWDEVICE_TYPE_D3D11VA and AV_PIX_FMT_D3D11.
2017-06-24 - xxxxxxx - lavf 57.75.100 - avio.h
Add AVIO_DATA_MARKER_FLUSH_POINT to signal preferred flush points to aviobuf.

libavutil/Makefile

@ -33,6 +33,7 @@ HEADERS = adler32.h \
hmac.h \
hwcontext.h \
hwcontext_cuda.h \
hwcontext_d3d11va.h \
hwcontext_dxva2.h \
hwcontext_qsv.h \
hwcontext_vaapi.h \
@ -156,6 +157,7 @@ OBJS = adler32.o \
OBJS-$(!HAVE_ATOMICS_NATIVE) += atomic.o \
OBJS-$(CONFIG_CUDA) += hwcontext_cuda.o
OBJS-$(CONFIG_D3D11VA) += hwcontext_d3d11va.o
OBJS-$(CONFIG_DXVA2) += hwcontext_dxva2.o
OBJS-$(CONFIG_QSV) += hwcontext_qsv.o
OBJS-$(CONFIG_LZO) += lzo.o
@ -171,6 +173,7 @@ SLIBOBJS-$(HAVE_GNU_WINDRES) += avutilres.o
SKIPHEADERS-$(HAVE_CUDA_H) += hwcontext_cuda.h
SKIPHEADERS-$(CONFIG_CUDA) += hwcontext_cuda_internal.h
SKIPHEADERS-$(CONFIG_D3D11VA) += hwcontext_d3d11va.h
SKIPHEADERS-$(CONFIG_DXVA2) += hwcontext_dxva2.h
SKIPHEADERS-$(CONFIG_QSV) += hwcontext_qsv.h
SKIPHEADERS-$(CONFIG_VAAPI) += hwcontext_vaapi.h

libavutil/hwcontext.c

@ -32,6 +32,9 @@ static const HWContextType *const hw_table[] = {
#if CONFIG_CUDA
&ff_hwcontext_type_cuda,
#endif
#if CONFIG_D3D11VA
&ff_hwcontext_type_d3d11va,
#endif
#if CONFIG_DXVA2
&ff_hwcontext_type_dxva2,
#endif
@ -53,6 +56,7 @@ static const HWContextType *const hw_table[] = {
static const char *const hw_type_names[] = {
[AV_HWDEVICE_TYPE_CUDA] = "cuda",
[AV_HWDEVICE_TYPE_DXVA2] = "dxva2",
[AV_HWDEVICE_TYPE_D3D11VA] = "d3d11va",
[AV_HWDEVICE_TYPE_QSV] = "qsv",
[AV_HWDEVICE_TYPE_VAAPI] = "vaapi",
[AV_HWDEVICE_TYPE_VDPAU] = "vdpau",

libavutil/hwcontext.h

@ -32,6 +32,7 @@ enum AVHWDeviceType {
AV_HWDEVICE_TYPE_QSV,
AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
AV_HWDEVICE_TYPE_NONE,
AV_HWDEVICE_TYPE_D3D11VA,
};
typedef struct AVHWDeviceInternal AVHWDeviceInternal;

libavutil/hwcontext_d3d11va.c

@ -0,0 +1,490 @@
/*
* This file is part of Libav.
*
* Libav is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* Libav is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Libav; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <windows.h>
#if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0600
#undef _WIN32_WINNT
#define _WIN32_WINNT 0x0600
#endif
#define COBJMACROS
#include <initguid.h>
#include <d3d11.h>
#include <dxgi1_2.h>
#include "avassert.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_d3d11va.h"
#include "hwcontext_internal.h"
#include "imgutils.h"
#include "pixdesc.h"
#include "pixfmt.h"
typedef HRESULT(WINAPI *PFN_CREATE_DXGI_FACTORY)(REFIID riid, void **ppFactory);
typedef struct D3D11VAFramesContext {
int nb_surfaces_used;
DXGI_FORMAT format;
ID3D11Texture2D *staging_texture;
} D3D11VAFramesContext;
static const struct {
DXGI_FORMAT d3d_format;
enum AVPixelFormat pix_fmt;
} supported_formats[] = {
{ DXGI_FORMAT_NV12, AV_PIX_FMT_NV12 },
{ DXGI_FORMAT_P010, AV_PIX_FMT_P010 },
};
static void d3d11va_default_lock(void *ctx)
{
WaitForSingleObjectEx(ctx, INFINITE, FALSE);
}
static void d3d11va_default_unlock(void *ctx)
{
ReleaseMutex(ctx);
}
static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
{
AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
D3D11VAFramesContext *s = ctx->internal->priv;
if (frames_hwctx->texture)
ID3D11Texture2D_Release(frames_hwctx->texture);
if (s->staging_texture)
ID3D11Texture2D_Release(s->staging_texture);
}
static void free_texture(void *opaque, uint8_t *data)
{
ID3D11Texture2D_Release((ID3D11Texture2D *)opaque);
av_free(data); /* frees the AVD3D11FrameDescriptor allocated in wrap_texture_buf() */
}
static AVBufferRef *wrap_texture_buf(ID3D11Texture2D *tex, int index)
{
AVBufferRef *buf;
AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));
if (!desc) {
ID3D11Texture2D_Release(tex);
return NULL;
}
desc->texture = tex;
desc->index = index;
buf = av_buffer_create((uint8_t *)desc, sizeof(*desc), free_texture, tex, 0);
if (!buf) {
ID3D11Texture2D_Release(tex);
av_free(desc);
return NULL;
}
return buf;
}
static AVBufferRef *d3d11va_alloc_single(AVHWFramesContext *ctx)
{
D3D11VAFramesContext *s = ctx->internal->priv;
AVD3D11VAFramesContext *hwctx = ctx->hwctx;
AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
HRESULT hr;
ID3D11Texture2D *tex;
D3D11_TEXTURE2D_DESC texDesc = {
.Width = ctx->width,
.Height = ctx->height,
.MipLevels = 1,
.Format = s->format,
.SampleDesc = { .Count = 1 },
.ArraySize = 1,
.Usage = D3D11_USAGE_DEFAULT,
.BindFlags = hwctx->BindFlags,
.MiscFlags = hwctx->MiscFlags,
};
hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &tex);
if (FAILED(hr)) {
av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
return NULL;
}
return wrap_texture_buf(tex, 0);
}
static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size)
{
AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
D3D11VAFramesContext *s = ctx->internal->priv;
AVD3D11VAFramesContext *hwctx = ctx->hwctx;
D3D11_TEXTURE2D_DESC texDesc;
if (!hwctx->texture)
return d3d11va_alloc_single(ctx);
ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc);
if (s->nb_surfaces_used >= texDesc.ArraySize) {
av_log(ctx, AV_LOG_ERROR, "Static surface pool size exceeded.\n");
return NULL;
}
ID3D11Texture2D_AddRef(hwctx->texture);
return wrap_texture_buf(hwctx->texture, s->nb_surfaces_used++);
}
static int d3d11va_frames_init(AVHWFramesContext *ctx)
{
AVD3D11VAFramesContext *hwctx = ctx->hwctx;
AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
D3D11VAFramesContext *s = ctx->internal->priv;
int i;
HRESULT hr;
D3D11_TEXTURE2D_DESC texDesc;
for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
if (ctx->sw_format == supported_formats[i].pix_fmt) {
s->format = supported_formats[i].d3d_format;
break;
}
}
if (i == FF_ARRAY_ELEMS(supported_formats)) {
av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format: %s\n",
av_get_pix_fmt_name(ctx->sw_format));
return AVERROR(EINVAL);
}
texDesc = (D3D11_TEXTURE2D_DESC){
.Width = ctx->width,
.Height = ctx->height,
.MipLevels = 1,
.Format = s->format,
.SampleDesc = { .Count = 1 },
.ArraySize = ctx->initial_pool_size,
.Usage = D3D11_USAGE_DEFAULT,
.BindFlags = hwctx->BindFlags,
.MiscFlags = hwctx->MiscFlags,
};
if (hwctx->texture) {
D3D11_TEXTURE2D_DESC texDesc2;
ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc2);
if (texDesc.Width != texDesc2.Width ||
texDesc.Height != texDesc2.Height ||
texDesc.Format != texDesc2.Format) {
av_log(ctx, AV_LOG_ERROR, "User-provided texture has mismatching parameters\n");
return AVERROR(EINVAL);
}
} else if (texDesc.ArraySize > 0) {
hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &hwctx->texture);
if (FAILED(hr)) {
av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
return AVERROR_UNKNOWN;
}
}
texDesc.ArraySize = 1;
texDesc.Usage = D3D11_USAGE_STAGING;
texDesc.BindFlags = 0;
texDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
texDesc.MiscFlags = 0;
hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &s->staging_texture);
if (FAILED(hr)) {
av_log(ctx, AV_LOG_ERROR, "Could not create the staging texture (%lx)\n", (long)hr);
return AVERROR_UNKNOWN;
}
ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(AVD3D11FrameDescriptor),
ctx, d3d11va_pool_alloc, NULL);
if (!ctx->internal->pool_internal)
return AVERROR(ENOMEM);
return 0;
}
static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
AVD3D11FrameDescriptor *desc;
frame->buf[0] = av_buffer_pool_get(ctx->pool);
if (!frame->buf[0])
return AVERROR(ENOMEM);
desc = (AVD3D11FrameDescriptor *)frame->buf[0]->data;
frame->data[0] = (uint8_t *)desc->texture;
frame->data[1] = (uint8_t *)desc->index;
frame->format = AV_PIX_FMT_D3D11;
frame->width = ctx->width;
frame->height = ctx->height;
return 0;
}
static int d3d11va_transfer_get_formats(AVHWFramesContext *ctx,
enum AVHWFrameTransferDirection dir,
enum AVPixelFormat **formats)
{
enum AVPixelFormat *fmts;
fmts = av_malloc_array(2, sizeof(*fmts));
if (!fmts)
return AVERROR(ENOMEM);
fmts[0] = ctx->sw_format;
fmts[1] = AV_PIX_FMT_NONE;
*formats = fmts;
return 0;
}
static void fill_texture_ptrs(uint8_t *data[4], int linesize[4],
AVHWFramesContext *ctx,
D3D11_TEXTURE2D_DESC *desc,
D3D11_MAPPED_SUBRESOURCE *map)
{
int i;
for (i = 0; i < 4; i++)
linesize[i] = map->RowPitch;
av_image_fill_pointers(data, ctx->sw_format, desc->Height,
(uint8_t*)map->pData, linesize);
}
static int d3d11va_transfer_data(AVHWFramesContext *ctx, AVFrame *dst,
const AVFrame *src)
{
AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
D3D11VAFramesContext *s = ctx->internal->priv;
int download = src->format == AV_PIX_FMT_D3D11;
const AVFrame *frame = download ? src : dst;
const AVFrame *other = download ? dst : src;
// (The interface types are compatible.)
ID3D11Resource *texture = (ID3D11Resource *)(ID3D11Texture2D *)frame->data[0];
int index = (intptr_t)frame->data[1];
ID3D11Resource *staging = (ID3D11Resource *)s->staging_texture;
int w = FFMIN(dst->width, src->width);
int h = FFMIN(dst->height, src->height);
uint8_t *map_data[4];
int map_linesize[4];
D3D11_TEXTURE2D_DESC desc;
D3D11_MAPPED_SUBRESOURCE map;
HRESULT hr;
if (frame->hw_frames_ctx->data != (uint8_t *)ctx || other->format != ctx->sw_format)
return AVERROR(EINVAL);
device_hwctx->lock(device_hwctx->lock_ctx);
ID3D11Texture2D_GetDesc(s->staging_texture, &desc);
if (download) {
ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
staging, 0, 0, 0, 0,
texture, index, NULL);
hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
staging, 0, D3D11_MAP_READ, 0, &map);
if (FAILED(hr))
goto map_failed;
fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);
av_image_copy(dst->data, dst->linesize, map_data, map_linesize,
ctx->sw_format, w, h);
ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);
} else {
hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
staging, 0, D3D11_MAP_WRITE, 0, &map);
if (FAILED(hr))
goto map_failed;
fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);
av_image_copy(map_data, map_linesize, src->data, src->linesize,
ctx->sw_format, w, h);
ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);
ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
texture, index, 0, 0, 0,
staging, 0, NULL);
}
device_hwctx->unlock(device_hwctx->lock_ctx);
return 0;
map_failed:
av_log(ctx, AV_LOG_ERROR, "Unable to lock D3D11VA surface (%lx)\n", (long)hr);
device_hwctx->unlock(device_hwctx->lock_ctx);
return AVERROR_UNKNOWN;
}
static int d3d11va_device_init(AVHWDeviceContext *hwdev)
{
AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;
HRESULT hr;
if (!device_hwctx->lock) {
device_hwctx->lock_ctx = CreateMutex(NULL, 0, NULL);
if (!device_hwctx->lock_ctx) { /* CreateMutex() returns NULL on failure */
av_log(NULL, AV_LOG_ERROR, "Failed to create a mutex\n");
return AVERROR(EINVAL);
}
device_hwctx->lock = d3d11va_default_lock;
device_hwctx->unlock = d3d11va_default_unlock;
}
if (!device_hwctx->device_context) {
ID3D11Device_GetImmediateContext(device_hwctx->device, &device_hwctx->device_context);
if (!device_hwctx->device_context)
return AVERROR_UNKNOWN;
}
if (!device_hwctx->video_device) {
hr = ID3D11Device_QueryInterface(device_hwctx->device, &IID_ID3D11VideoDevice,
(void **)&device_hwctx->video_device);
if (FAILED(hr))
return AVERROR_UNKNOWN;
}
if (!device_hwctx->video_context) {
hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device_context, &IID_ID3D11VideoContext,
(void **)&device_hwctx->video_context);
if (FAILED(hr))
return AVERROR_UNKNOWN;
}
return 0;
}
static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
{
AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;
if (device_hwctx->device)
ID3D11Device_Release(device_hwctx->device);
if (device_hwctx->device_context)
ID3D11DeviceContext_Release(device_hwctx->device_context);
if (device_hwctx->video_device)
ID3D11VideoDevice_Release(device_hwctx->video_device);
if (device_hwctx->video_context)
ID3D11VideoContext_Release(device_hwctx->video_context);
if (device_hwctx->lock == d3d11va_default_lock)
CloseHandle(device_hwctx->lock_ctx);
}
static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
AVDictionary *opts, int flags)
{
AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
HANDLE d3dlib;
HRESULT hr;
PFN_D3D11_CREATE_DEVICE createD3D;
IDXGIAdapter *pAdapter = NULL;
ID3D10Multithread *pMultithread;
UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
if (device) {
PFN_CREATE_DXGI_FACTORY mCreateDXGIFactory;
HMODULE dxgilib = LoadLibrary("dxgi.dll");
if (!dxgilib)
return AVERROR_UNKNOWN;
mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) GetProcAddress(dxgilib, "CreateDXGIFactory");
if (mCreateDXGIFactory) {
IDXGIFactory2 *pDXGIFactory;
hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&pDXGIFactory);
if (SUCCEEDED(hr)) {
int adapter = atoi(device);
if (FAILED(IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter, &pAdapter)))
pAdapter = NULL;
IDXGIFactory2_Release(pDXGIFactory);
}
}
FreeLibrary(dxgilib);
}
// We let this "leak" - this is fine, as unloading has no great benefit, and
// Windows will mark a DLL as loaded forever if its internal refcount overflows
// from too many LoadLibrary calls.
d3dlib = LoadLibrary("d3d11.dll");
if (!d3dlib) {
av_log(ctx, AV_LOG_ERROR, "Failed to load D3D11 library\n");
return AVERROR_UNKNOWN;
}
createD3D = (PFN_D3D11_CREATE_DEVICE) GetProcAddress(d3dlib, "D3D11CreateDevice");
if (!createD3D) {
av_log(ctx, AV_LOG_ERROR, "Failed to locate D3D11CreateDevice\n");
return AVERROR_UNKNOWN;
}
hr = createD3D(pAdapter, pAdapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE, NULL, creationFlags, NULL, 0,
D3D11_SDK_VERSION, &device_hwctx->device, NULL, NULL);
if (pAdapter)
IDXGIAdapter_Release(pAdapter);
if (FAILED(hr)) {
av_log(ctx, AV_LOG_ERROR, "Failed to create Direct3D device (%lx)\n", (long)hr);
return AVERROR_UNKNOWN;
}
hr = ID3D11Device_QueryInterface(device_hwctx->device, &IID_ID3D10Multithread, (void **)&pMultithread);
if (SUCCEEDED(hr)) {
ID3D10Multithread_SetMultithreadProtected(pMultithread, TRUE);
ID3D10Multithread_Release(pMultithread);
}
return 0;
}
const HWContextType ff_hwcontext_type_d3d11va = {
.type = AV_HWDEVICE_TYPE_D3D11VA,
.name = "D3D11VA",
.device_hwctx_size = sizeof(AVD3D11VADeviceContext),
.frames_hwctx_size = sizeof(AVD3D11VAFramesContext),
.frames_priv_size = sizeof(D3D11VAFramesContext),
.device_create = d3d11va_device_create,
.device_init = d3d11va_device_init,
.device_uninit = d3d11va_device_uninit,
.frames_init = d3d11va_frames_init,
.frames_uninit = d3d11va_frames_uninit,
.frames_get_buffer = d3d11va_get_buffer,
.transfer_get_formats = d3d11va_transfer_get_formats,
.transfer_data_to = d3d11va_transfer_data,
.transfer_data_from = d3d11va_transfer_data,
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_D3D11, AV_PIX_FMT_NONE },
};
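
A rough end-to-end usage sketch of the new hwcontext (assumed NV12 input and
pool size; error handling trimmed). The final transfer exercises the upload
path that the commit message flags as untested:

    #include "libavutil/hwcontext.h"
    #include "libavutil/hwcontext_d3d11va.h"

    /* Create a D3D11VA device, build a fixed-size frame pool and upload one
     * software NV12 frame into it. */
    static int d3d11_upload_sketch(AVFrame *sw_frame)
    {
        AVBufferRef *device_ref = NULL, *frames_ref = NULL;
        AVFrame *hw_frame = av_frame_alloc();
        AVHWFramesContext *fctx;
        int ret;

        if (!hw_frame)
            return AVERROR(ENOMEM);
        ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_D3D11VA,
                                     NULL, NULL, 0);
        if (ret < 0)
            goto done;
        frames_ref = av_hwframe_ctx_alloc(device_ref);
        if (!frames_ref) {
            ret = AVERROR(ENOMEM);
            goto done;
        }
        fctx                    = (AVHWFramesContext *)frames_ref->data;
        fctx->format            = AV_PIX_FMT_D3D11;
        fctx->sw_format         = AV_PIX_FMT_NV12;  /* or AV_PIX_FMT_P010 */
        fctx->width             = sw_frame->width;
        fctx->height            = sw_frame->height;
        fctx->initial_pool_size = 4;   /* fixed-size array-texture pool */
        if ((ret = av_hwframe_ctx_init(frames_ref)) < 0)
            goto done;
        if ((ret = av_hwframe_get_buffer(frames_ref, hw_frame, 0)) < 0)
            goto done;
        /* Upload: staging texture + CopySubresourceRegion, as implemented above. */
        ret = av_hwframe_transfer_data(hw_frame, sw_frame, 0);
    done:
        av_frame_free(&hw_frame);
        av_buffer_unref(&frames_ref);
        av_buffer_unref(&device_ref);
        return ret;
    }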

libavutil/hwcontext_d3d11va.h

@ -0,0 +1,160 @@
/*
* This file is part of Libav.
*
* Libav is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* Libav is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Libav; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_HWCONTEXT_D3D11VA_H
#define AVUTIL_HWCONTEXT_D3D11VA_H
/**
* @file
* An API-specific header for AV_HWDEVICE_TYPE_D3D11VA.
*
* The default pool implementation will be fixed-size if initial_pool_size is
* set (and allocate elements from an array texture). Otherwise it will allocate
* individual textures. Be aware that decoding requires a single array texture.
*/
#include <d3d11.h>
/**
* This struct is allocated as AVHWDeviceContext.hwctx
*/
typedef struct AVD3D11VADeviceContext {
/**
* Device used for texture creation and access. This can also be used to
* set the libavcodec decoding device.
*
* Must be set by the user. This is the only mandatory field - the other
* device context fields are set from this and are available for convenience.
*
* Deallocating the AVHWDeviceContext will always release this interface,
* and it does not matter whether it was user-allocated.
*/
ID3D11Device *device;
/**
* If unset, this will be set from the device field on init.
*
* Deallocating the AVHWDeviceContext will always release this interface,
* and it does not matter whether it was user-allocated.
*/
ID3D11DeviceContext *device_context;
/**
* If unset, this will be set from the device field on init.
*
* Deallocating the AVHWDeviceContext will always release this interface,
* and it does not matter whether it was user-allocated.
*/
ID3D11VideoDevice *video_device;
/**
* If unset, this will be set from the device_context field on init.
*
* Deallocating the AVHWDeviceContext will always release this interface,
* and it does not matter whether it was user-allocated.
*/
ID3D11VideoContext *video_context;
/**
* Callbacks for locking. They protect accesses to device_context and
* video_context calls. They also protect access to the internal staging
* texture (for av_hwframe_transfer_data() calls). They do NOT protect
* access to hwcontext or decoder state in general.
*
* If unset on init, the hwcontext implementation will set them to use an
* internal mutex.
*
* The underlying lock must be recursive. lock_ctx is for free use by the
* locking implementation.
*/
void (*lock)(void *lock_ctx);
void (*unlock)(void *lock_ctx);
void *lock_ctx;
} AVD3D11VADeviceContext;
/**
* D3D11 frame descriptor for pool allocation.
*
* In user-allocated pools, AVHWFramesContext.pool must return AVBufferRefs
* with the data pointer pointing at an object of this type describing the
* planes of the frame.
*
* This has no use outside of custom allocation, and the AVBufferRefs attached
* to an AVFrame do not necessarily point to an instance of this struct.
*/
typedef struct AVD3D11FrameDescriptor {
/**
* The texture in which the frame is located. The reference count is
* managed by the AVBufferRef, and destroying the reference will release
* the interface.
*
* Normally stored in AVFrame.data[0].
*/
ID3D11Texture2D *texture;
/**
* The index into the array texture of the element representing the frame, or
* 0 if the texture is not an array texture.
*
* Normally stored in AVFrame.data[1] (cast from intptr_t).
*/
intptr_t index;
} AVD3D11FrameDescriptor;
/**
* This struct is allocated as AVHWFramesContext.hwctx
*/
typedef struct AVD3D11VAFramesContext {
/**
* The canonical texture used for pool allocation. If this is set to NULL
* on init, the hwframes implementation will allocate and set an array
* texture if initial_pool_size > 0.
*
* The only situation when the API user should set this is:
* - the user wants to do manual pool allocation (setting
* AVHWFramesContext.pool), instead of letting AVHWFramesContext
* allocate the pool
* - of an array texture
* - and wants it to be used for decoding
* - this has to be done before calling av_hwframe_ctx_init()
*
* Deallocating the AVHWFramesContext will always release this interface,
* and it does not matter whether it was user-allocated.
*
* This is in particular used by the libavcodec D3D11VA hwaccel, which
* requires a single array texture. It will create ID3D11VideoDecoderOutputView
* objects for each array texture element on decoder initialization.
*/
ID3D11Texture2D *texture;
/**
* D3D11_TEXTURE2D_DESC.BindFlags used for texture creation. The user must
* at least set D3D11_BIND_DECODER if the frames context is to be used for
* video decoding.
* This field is ignored/invalid if a user-allocated texture is provided.
*/
UINT BindFlags;
/**
* D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation.
* This field is ignored/invalid if a user-allocated texture is provided.
*/
UINT MiscFlags;
} AVD3D11VAFramesContext;
#endif /* AVUTIL_HWCONTEXT_D3D11VA_H */
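
Per the AVD3D11VAFramesContext documentation above, a caller doing manual pool
allocation for decoding hands in its own array texture before init. A sketch,
assuming device, frames_ref and pool_size are set up by the caller:

    /* Must run before av_hwframe_ctx_init(frames_ref); the hwframes code then
     * hands out the texture's array slices as pool entries. */
    AVHWFramesContext      *fctx  = (AVHWFramesContext *)frames_ref->data;
    AVD3D11VAFramesContext *hwctx = fctx->hwctx;

    D3D11_TEXTURE2D_DESC desc = {
        .Width      = fctx->width,
        .Height     = fctx->height,
        .MipLevels  = 1,
        .ArraySize  = pool_size,          /* one slice per pooled surface */
        .Format     = DXGI_FORMAT_NV12,   /* must match fctx->sw_format */
        .SampleDesc = { .Count = 1 },
        .Usage      = D3D11_USAGE_DEFAULT,
        .BindFlags  = D3D11_BIND_DECODER, /* required for video decoding */
    };
    if (FAILED(ID3D11Device_CreateTexture2D(device, &desc, NULL, &hwctx->texture)))
        return AVERROR_UNKNOWN;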

libavutil/hwcontext_internal.h

@ -158,6 +158,7 @@ int ff_hwframe_map_create(AVBufferRef *hwframe_ref,
extern const HWContextType ff_hwcontext_type_cuda;
extern const HWContextType ff_hwcontext_type_d3d11va;
extern const HWContextType ff_hwcontext_type_dxva2;
extern const HWContextType ff_hwcontext_type_qsv;
extern const HWContextType ff_hwcontext_type_vaapi;

libavutil/pixdesc.c

@ -2158,6 +2158,10 @@ static const AVPixFmtDescriptor av_pix_fmt_descriptors[AV_PIX_FMT_NB] = {
.flags = AV_PIX_FMT_FLAG_BE | AV_PIX_FMT_FLAG_PLANAR |
AV_PIX_FMT_FLAG_RGB | AV_PIX_FMT_FLAG_ALPHA,
},
[AV_PIX_FMT_D3D11] = {
.name = "d3d11",
.flags = AV_PIX_FMT_FLAG_HWACCEL,
},
};
#if FF_API_PLUS1_MINUS1
FF_ENABLE_DEPRECATION_WARNINGS

libavutil/pixfmt.h

@ -240,7 +240,7 @@ enum AVPixelFormat {
*/
AV_PIX_FMT_MMAL,
-AV_PIX_FMT_D3D11VA_VLD, ///< HW decoding through Direct3D11, Picture.data[3] contains a ID3D11VideoDecoderOutputView pointer
+AV_PIX_FMT_D3D11VA_VLD, ///< HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView pointer
/**
* HW acceleration through CUDA. data[i] contain CUdeviceptr pointers
@ -314,6 +314,18 @@ enum AVPixelFormat {
AV_PIX_FMT_P016LE, ///< like NV12, with 16bpp per component, little-endian
AV_PIX_FMT_P016BE, ///< like NV12, with 16bpp per component, big-endian
/**
* Hardware surfaces for Direct3D11.
*
* This is preferred over the legacy AV_PIX_FMT_D3D11VA_VLD. The new D3D11
* hwaccel API and filtering support AV_PIX_FMT_D3D11 only.
*
* data[0] contains a ID3D11Texture2D pointer, and data[1] contains the
* texture array index of the frame as intptr_t if the ID3D11Texture2D is
* an array texture (or always 0 if it's a normal texture).
*/
AV_PIX_FMT_D3D11,
AV_PIX_FMT_NB ///< number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
};

libavutil/version.h

@ -80,7 +80,7 @@
#define LIBAVUTIL_VERSION_MAJOR 55
-#define LIBAVUTIL_VERSION_MINOR 66
+#define LIBAVUTIL_VERSION_MINOR 67
#define LIBAVUTIL_VERSION_MICRO 100
#define LIBAVUTIL_VERSION_INT AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, \