FFmpeg/libavcodec/vaapi_decode.h
Mark Thompson 5dd9a4b88b vaapi: Implement device-only setup
In this case, the user only supplies a device and the frame context
is allocated internally by lavc.
2017-02-13 21:44:43 +00:00
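For context, this is roughly what "device-only setup" looks like from the caller's side. A minimal sketch, assuming an AVCodecContext already allocated for a VAAPI-capable decoder; the helper name open_vaapi_decoder is hypothetical, and pixel-format negotiation via the get_format callback is omitted:

#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>

/* Hypothetical helper: hand lavc only a device reference; with this
 * patch the decoder's hardware frames context is allocated internally. */
static int open_vaapi_decoder(AVCodecContext *avctx)
{
    AVBufferRef *device_ref = NULL;
    int err;

    /* Open a VAAPI device (NULL selects the default device). */
    err = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VAAPI,
                                 NULL, NULL, 0);
    if (err < 0)
        return err;

    /* Device-only setup: no hw_frames_ctx is supplied here; ownership of
     * the device reference passes to the codec context. */
    avctx->hw_device_ctx = device_ref;

    return avcodec_open2(avctx, avctx->codec, NULL);
}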


/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#ifndef AVCODEC_VAAPI_DECODE_H
#define AVCODEC_VAAPI_DECODE_H

#include <va/va.h>

#include "libavutil/frame.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_vaapi.h"

#include "avcodec.h"

#include "version.h"
#if FF_API_VAAPI_CONTEXT
#include "vaapi.h"
#endif

// For VAAPI hardware frames, the VA surface ID is stored in AVFrame.data[3].
static inline VASurfaceID ff_vaapi_get_surface_id(AVFrame *pic)
{
    return (uintptr_t)pic->data[3];
}

enum {
    MAX_PARAM_BUFFERS = 16,
};

typedef struct VAAPIDecodePicture {
    // VA surface the picture will be decoded to.
    VASurfaceID         output_surface;

    // Picture-level parameter buffers attached to this picture.
    int                 nb_param_buffers;
    VABufferID          param_buffers[MAX_PARAM_BUFFERS];

    // Dynamically-sized array of slice parameter/data buffers.
    int                 nb_slices;
    VABufferID         *slice_buffers;
    int                 slices_allocated;
} VAAPIDecodePicture;

typedef struct VAAPIDecodeContext {
    // VA configuration chosen at init time for this codec.
    VAProfile             va_profile;
    VAEntrypoint          va_entrypoint;
    VAConfigID            va_config;
    VAContextID           va_context;

#if FF_API_VAAPI_CONTEXT
    // Set if the user supplied a legacy struct vaapi_context instead of
    // device/frames contexts.
    int                   have_old_context;
    struct vaapi_context *old_context;
    AVBufferRef          *device_ref;
#endif

    // Pointers into the device and frames contexts in use.
    AVHWDeviceContext    *device;
    AVVAAPIDeviceContext *hwctx;

    AVHWFramesContext    *frames;
    AVVAAPIFramesContext *hwfc;

    enum AVPixelFormat    surface_format;
    int                   surface_count;
} VAAPIDecodeContext;

int ff_vaapi_decode_make_param_buffer(AVCodecContext *avctx,
                                      VAAPIDecodePicture *pic,
                                      int type,
                                      const void *data,
                                      size_t size);

int ff_vaapi_decode_make_slice_buffer(AVCodecContext *avctx,
                                      VAAPIDecodePicture *pic,
                                      const void *params_data,
                                      size_t params_size,
                                      const void *slice_data,
                                      size_t slice_size);

int ff_vaapi_decode_issue(AVCodecContext *avctx,
                          VAAPIDecodePicture *pic);
int ff_vaapi_decode_cancel(AVCodecContext *avctx,
                           VAAPIDecodePicture *pic);

int ff_vaapi_decode_init(AVCodecContext *avctx);
int ff_vaapi_decode_uninit(AVCodecContext *avctx);

#endif /* AVCODEC_VAAPI_DECODE_H */
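
The helpers declared above are driven by the per-codec hwaccel backends (vaapi_mpeg2.c, vaapi_h264.c, and so on). The following is a rough sketch, not part of the file, of the expected call order for one picture; it uses the MPEG-2 VA parameter structures purely as an example, and submit_one_picture together with its arguments are hypothetical stand-ins for codec-specific state:

#include <stdint.h>

#include "vaapi_decode.h"

/* Sketch: attach picture parameters and one slice to a picture, then
 * submit it. Real hwaccels split this across start_frame/decode_slice/
 * end_frame callbacks and call ff_vaapi_decode_cancel on error paths. */
static int submit_one_picture(AVCodecContext *avctx, VAAPIDecodePicture *pic,
                              AVFrame *frame,
                              const VAPictureParameterBufferMPEG2 *pp,
                              const VASliceParameterBufferMPEG2 *sp,
                              const uint8_t *slice_data, size_t slice_size)
{
    int err;

    /* The VA surface backing the output frame lives in frame->data[3]. */
    pic->output_surface = ff_vaapi_get_surface_id(frame);

    /* Picture-level parameters go into param_buffers[]. */
    err = ff_vaapi_decode_make_param_buffer(avctx, pic,
                                            VAPictureParameterBufferType,
                                            pp, sizeof(*pp));
    if (err < 0)
        goto fail;

    /* Each slice adds a parameter-buffer / data-buffer pair. */
    err = ff_vaapi_decode_make_slice_buffer(avctx, pic, sp, sizeof(*sp),
                                            slice_data, slice_size);
    if (err < 0)
        goto fail;

    /* Render everything queued on this picture and release the buffers. */
    return ff_vaapi_decode_issue(avctx, pic);

fail:
    ff_vaapi_decode_cancel(avctx, pic);
    return err;
}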