1
0
mirror of https://github.com/FFmpeg/FFmpeg.git synced 2024-12-23 12:43:46 +02:00

Merge commit '2852740e23f91d6775714d7cc29b9a73e1111ce0'

* commit '2852740e23f91d6775714d7cc29b9a73e1111ce0':
  vdpau: store picture data in picture's rather than codec's context

Merged-by: Michael Niedermayer <michaelni@gmx.at>
This commit is contained in:
Michael Niedermayer 2013-08-06 14:25:08 +02:00
commit c3b2902320
7 changed files with 94 additions and 50 deletions

View File

@@ -806,7 +806,7 @@ SKIPHEADERS-$(CONFIG_LIBUTVIDEO) += libutvideo.h
SKIPHEADERS-$(CONFIG_MPEG_XVMC_DECODER) += xvmc.h
SKIPHEADERS-$(CONFIG_VAAPI) += vaapi_internal.h
SKIPHEADERS-$(CONFIG_VDA) += vda.h
SKIPHEADERS-$(CONFIG_VDPAU) += vdpau.h
SKIPHEADERS-$(CONFIG_VDPAU) += vdpau.h vdpau_internal.h
TESTPROGS = cabac \
dct \

View File

@@ -38,13 +38,15 @@
* @{
*/
int ff_vdpau_common_start_frame(AVCodecContext *avctx,
int ff_vdpau_common_start_frame(Picture *pic,
av_unused const uint8_t *buffer,
av_unused uint32_t size)
{
AVVDPAUContext *hwctx = avctx->hwaccel_context;
struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
hwctx->bitstream_buffers_used = 0;
pic_ctx->bitstream_buffers_allocated = 0;
pic_ctx->bitstream_buffers_used = 0;
pic_ctx->bitstream_buffers = NULL;
return 0;
}
@@ -55,31 +57,32 @@ int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx)
{
AVVDPAUContext *hwctx = avctx->hwaccel_context;
MpegEncContext *s = avctx->priv_data;
VdpVideoSurface surf = ff_vdpau_get_surface_id(s->current_picture_ptr);
Picture *pic = s->current_picture_ptr;
struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
VdpVideoSurface surf = ff_vdpau_get_surface_id(pic);
hwctx->render(hwctx->decoder, surf, (void *)&hwctx->info,
hwctx->bitstream_buffers_used, hwctx->bitstream_buffers);
hwctx->render(hwctx->decoder, surf, (void *)&pic_ctx->info,
pic_ctx->bitstream_buffers_used, pic_ctx->bitstream_buffers);
ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
hwctx->bitstream_buffers_used = 0;
av_freep(&pic_ctx->bitstream_buffers);
return 0;
}
#endif
int ff_vdpau_add_buffer(AVCodecContext *avctx,
const uint8_t *buf, uint32_t size)
int ff_vdpau_add_buffer(Picture *pic, const uint8_t *buf, uint32_t size)
{
AVVDPAUContext *hwctx = avctx->hwaccel_context;
VdpBitstreamBuffer *buffers = hwctx->bitstream_buffers;
struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
VdpBitstreamBuffer *buffers = pic_ctx->bitstream_buffers;
buffers = av_fast_realloc(buffers, &hwctx->bitstream_buffers_allocated,
(hwctx->bitstream_buffers_used + 1) * sizeof(*buffers));
buffers = av_fast_realloc(buffers, &pic_ctx->bitstream_buffers_allocated,
(pic_ctx->bitstream_buffers_used + 1) * sizeof(*buffers));
if (!buffers)
return AVERROR(ENOMEM);
hwctx->bitstream_buffers = buffers;
buffers += hwctx->bitstream_buffers_used++;
pic_ctx->bitstream_buffers = buffers;
buffers += pic_ctx->bitstream_buffers_used++;
buffers->struct_version = VDP_BITSTREAM_BUFFER_VERSION;
buffers->bitstream = buf;

View File

@@ -66,8 +66,8 @@ static void vdpau_h264_set_rf(VdpReferenceFrameH264 *rf, Picture *pic,
static void vdpau_h264_set_reference_frames(AVCodecContext *avctx)
{
H264Context * const h = avctx->priv_data;
AVVDPAUContext *hwctx = avctx->hwaccel_context;
VdpPictureInfoH264 *info = &hwctx->info.h264;
struct vdpau_picture_context *pic_ctx = h->cur_pic_ptr->hwaccel_picture_private;
VdpPictureInfoH264 *info = &pic_ctx->info.h264;
int list;
VdpReferenceFrameH264 *rf = &info->referenceFrames[0];
@@ -118,9 +118,9 @@ static int vdpau_h264_start_frame(AVCodecContext *avctx,
const uint8_t *buffer, uint32_t size)
{
H264Context * const h = avctx->priv_data;
AVVDPAUContext *hwctx = avctx->hwaccel_context;
VdpPictureInfoH264 *info = &hwctx->info.h264;
Picture *pic = h->cur_pic_ptr;
struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
VdpPictureInfoH264 *info = &pic_ctx->info.h264;
/* init VdpPictureInfoH264 */
info->slice_count = 0;
@@ -161,7 +161,7 @@ static int vdpau_h264_start_frame(AVCodecContext *avctx,
vdpau_h264_set_reference_frames(avctx);
return ff_vdpau_common_start_frame(avctx, buffer, size);
return ff_vdpau_common_start_frame(pic, buffer, size);
}
static const uint8_t start_code_prefix[3] = { 0x00, 0x00, 0x01 };
@@ -169,18 +169,20 @@ static const uint8_t start_code_prefix[3] = { 0x00, 0x00, 0x01 };
static int vdpau_h264_decode_slice(AVCodecContext *avctx,
const uint8_t *buffer, uint32_t size)
{
AVVDPAUContext *hwctx = avctx->hwaccel_context;
H264Context *h = avctx->priv_data;
Picture *pic = h->cur_pic_ptr;
struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
int val;
val = ff_vdpau_add_buffer(avctx, start_code_prefix, 3);
val = ff_vdpau_add_buffer(pic, start_code_prefix, 3);
if (val)
return val;
val = ff_vdpau_add_buffer(avctx, buffer, size);
val = ff_vdpau_add_buffer(pic, buffer, size);
if (val)
return val;
hwctx->info.h264.slice_count++;
pic_ctx->info.h264.slice_count++;
return 0;
}
@@ -188,13 +190,15 @@ static int vdpau_h264_end_frame(AVCodecContext *avctx)
{
AVVDPAUContext *hwctx = avctx->hwaccel_context;
H264Context *h = avctx->priv_data;
VdpVideoSurface surf = ff_vdpau_get_surface_id(h->cur_pic_ptr);
Picture *pic = h->cur_pic_ptr;
struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
VdpVideoSurface surf = ff_vdpau_get_surface_id(pic);
hwctx->render(hwctx->decoder, surf, (void *)&hwctx->info,
hwctx->bitstream_buffers_used, hwctx->bitstream_buffers);
hwctx->render(hwctx->decoder, surf, (void *)&pic_ctx->info,
pic_ctx->bitstream_buffers_used, pic_ctx->bitstream_buffers);
ff_h264_draw_horiz_band(h, 0, h->avctx->height);
hwctx->bitstream_buffers_used = 0;
av_freep(&pic_ctx->bitstream_buffers);
return 0;
}
@@ -207,4 +211,5 @@ AVHWAccel ff_h264_vdpau_hwaccel = {
.start_frame = vdpau_h264_start_frame,
.end_frame = vdpau_h264_end_frame,
.decode_slice = vdpau_h264_decode_slice,
.priv_data_size = sizeof(struct vdpau_picture_context),
};

View File

@@ -25,6 +25,7 @@
#define AVCODEC_VDPAU_INTERNAL_H
#include <stdint.h>
#include <vdpau/vdpau.h>
#include "h264.h"
#include "mpegvideo.h"
@@ -34,10 +35,31 @@ static inline uintptr_t ff_vdpau_get_surface_id(Picture *pic)
return (uintptr_t)pic->f.data[3];
}
int ff_vdpau_common_start_frame(AVCodecContext *avctx,
struct vdpau_picture_context {
/**
* VDPAU picture information.
*/
union AVVDPAUPictureInfo info;
/**
* Allocated size of the bitstream_buffers table.
*/
int bitstream_buffers_allocated;
/**
* Useful bitstream buffers in the bitstream buffers table.
*/
int bitstream_buffers_used;
/**
* Table of bitstream buffers.
*/
VdpBitstreamBuffer *bitstream_buffers;
};
int ff_vdpau_common_start_frame(Picture *pic,
const uint8_t *buffer, uint32_t size);
int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx);
int ff_vdpau_add_buffer(AVCodecContext *avctx,
const uint8_t *buf, uint32_t buf_size);
int ff_vdpau_add_buffer(Picture *pic, const uint8_t *buf, uint32_t buf_size);
#endif /* AVCODEC_VDPAU_INTERNAL_H */

View File

@@ -31,8 +31,9 @@ static int vdpau_mpeg_start_frame(AVCodecContext *avctx,
const uint8_t *buffer, uint32_t size)
{
MpegEncContext * const s = avctx->priv_data;
AVVDPAUContext *hwctx = avctx->hwaccel_context;
VdpPictureInfoMPEG1Or2 *info = &hwctx->info.mpeg;
Picture *pic = s->current_picture_ptr;
struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
VdpPictureInfoMPEG1Or2 *info = &pic_ctx->info.mpeg;
VdpVideoSurface ref;
int i;
@@ -44,11 +45,11 @@ static int vdpau_mpeg_start_frame(AVCodecContext *avctx,
case AV_PICTURE_TYPE_B:
ref = ff_vdpau_get_surface_id(&s->next_picture);
assert(ref != VDP_INVALID_HANDLE);
hwctx->info.mpeg.backward_reference = ref;
info->backward_reference = ref;
/* fall through to forward prediction */
case AV_PICTURE_TYPE_P:
ref = ff_vdpau_get_surface_id(&s->last_picture);
hwctx->info.mpeg.forward_reference = ref;
info->forward_reference = ref;
}
info->slice_count = 0;
@@ -74,20 +75,22 @@ static int vdpau_mpeg_start_frame(AVCodecContext *avctx,
info->non_intra_quantizer_matrix[i] = s->inter_matrix[i];
}
return ff_vdpau_common_start_frame(avctx, buffer, size);
return ff_vdpau_common_start_frame(pic, buffer, size);
}
static int vdpau_mpeg_decode_slice(AVCodecContext *avctx,
const uint8_t *buffer, uint32_t size)
{
AVVDPAUContext *hwctx = avctx->hwaccel_context;
MpegEncContext * const s = avctx->priv_data;
Picture *pic = s->current_picture_ptr;
struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
int val;
val = ff_vdpau_add_buffer(avctx, buffer, size);
val = ff_vdpau_add_buffer(pic, buffer, size);
if (val < 0)
return val;
hwctx->info.mpeg.slice_count++;
pic_ctx->info.mpeg.slice_count++;
return 0;
}
@@ -100,6 +103,7 @@ AVHWAccel ff_mpeg1_vdpau_hwaccel = {
.start_frame = vdpau_mpeg_start_frame,
.end_frame = ff_vdpau_mpeg_end_frame,
.decode_slice = vdpau_mpeg_decode_slice,
.priv_data_size = sizeof(struct vdpau_picture_context),
};
#endif
@@ -112,5 +116,6 @@ AVHWAccel ff_mpeg2_vdpau_hwaccel = {
.start_frame = vdpau_mpeg_start_frame,
.end_frame = ff_vdpau_mpeg_end_frame,
.decode_slice = vdpau_mpeg_decode_slice,
.priv_data_size = sizeof(struct vdpau_picture_context),
};
#endif

View File

@@ -31,8 +31,9 @@ static int vdpau_mpeg4_start_frame(AVCodecContext *avctx,
const uint8_t *buffer, uint32_t size)
{
MpegEncContext * const s = avctx->priv_data;
AVVDPAUContext *hwctx = avctx->hwaccel_context;
VdpPictureInfoMPEG4Part2 *info = &hwctx->info.mpeg4;
Picture *pic = s->current_picture_ptr;
struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
VdpPictureInfoMPEG4Part2 *info = &pic_ctx->info.mpeg4;
VdpVideoSurface ref;
int i;
@@ -74,8 +75,8 @@ static int vdpau_mpeg4_start_frame(AVCodecContext *avctx,
info->non_intra_quantizer_matrix[i] = s->inter_matrix[i];
}
ff_vdpau_common_start_frame(avctx, buffer, size);
return ff_vdpau_add_buffer(avctx, buffer, size);
ff_vdpau_common_start_frame(pic, buffer, size);
return ff_vdpau_add_buffer(pic, buffer, size);
}
static int vdpau_mpeg4_decode_slice(av_unused AVCodecContext *avctx,
@@ -94,6 +95,7 @@ AVHWAccel ff_h263_vdpau_hwaccel = {
.start_frame = vdpau_mpeg4_start_frame,
.end_frame = ff_vdpau_mpeg_end_frame,
.decode_slice = vdpau_mpeg4_decode_slice,
.priv_data_size = sizeof(struct vdpau_picture_context),
};
#endif
@@ -106,5 +108,6 @@ AVHWAccel ff_mpeg4_vdpau_hwaccel = {
.start_frame = vdpau_mpeg4_start_frame,
.end_frame = ff_vdpau_mpeg_end_frame,
.decode_slice = vdpau_mpeg4_decode_slice,
.priv_data_size = sizeof(struct vdpau_picture_context),
};
#endif

View File

@@ -32,9 +32,10 @@ static int vdpau_vc1_start_frame(AVCodecContext *avctx,
const uint8_t *buffer, uint32_t size)
{
VC1Context * const v = avctx->priv_data;
AVVDPAUContext *hwctx = avctx->hwaccel_context;
MpegEncContext * const s = &v->s;
VdpPictureInfoVC1 *info = &hwctx->info.vc1;
Picture *pic = s->current_picture_ptr;
struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
VdpPictureInfoVC1 *info = &pic_ctx->info.vc1;
VdpVideoSurface ref;
/* fill LvPictureInfoVC1 struct */
@@ -88,20 +89,23 @@ static int vdpau_vc1_start_frame(AVCodecContext *avctx,
info->deblockEnable = v->postprocflag & 1;
info->pquant = v->pq;
return ff_vdpau_common_start_frame(avctx, buffer, size);
return ff_vdpau_common_start_frame(pic, buffer, size);
}
static int vdpau_vc1_decode_slice(AVCodecContext *avctx,
const uint8_t *buffer, uint32_t size)
{
AVVDPAUContext *hwctx = avctx->hwaccel_context;
VC1Context * const v = avctx->priv_data;
MpegEncContext * const s = &v->s;
Picture *pic = s->current_picture_ptr;
struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
int val;
val = ff_vdpau_add_buffer(avctx, buffer, size);
val = ff_vdpau_add_buffer(pic, buffer, size);
if (val < 0)
return val;
hwctx->info.vc1.slice_count++;
pic_ctx->info.vc1.slice_count++;
return 0;
}
@@ -114,6 +118,7 @@ AVHWAccel ff_wmv3_vdpau_hwaccel = {
.start_frame = vdpau_vc1_start_frame,
.end_frame = ff_vdpau_mpeg_end_frame,
.decode_slice = vdpau_vc1_decode_slice,
.priv_data_size = sizeof(struct vdpau_picture_context),
};
#endif
@@ -125,4 +130,5 @@ AVHWAccel ff_vc1_vdpau_hwaccel = {
.start_frame = vdpau_vc1_start_frame,
.end_frame = ff_vdpau_mpeg_end_frame,
.decode_slice = vdpau_vc1_decode_slice,
.priv_data_size = sizeof(struct vdpau_picture_context),
};