
Merge remote-tracking branch 'qatar/master'

* qatar/master: (22 commits)
  rv34: frame-level multi-threading
  mpegvideo: claim ownership of referenced pictures
  aacsbr: prevent out of bounds memcpy().
  ipmovie: fix pts for CODEC_ID_INTERPLAY_DPCM
  sierravmd: fix audio pts
  bethsoftvideo: Use bytestream2 functions to prevent buffer overreads.
  bmpenc: support for PIX_FMT_RGB444
  swscale: fix crash in fast_bilinear code when compiled with -mred-zone.
  swscale: specify register type.
  rv34: use get_bits_left()
  avconv: reinitialize the filtergraph on resolution change.
  vsrc_buffer: error on changing frame parameters.
  avconv: fix -copyinkf.
  fate: Update file checksums after the mov muxer change in a78dbada55
  movenc: Don't store a nonzero creation time if nothing was set by the caller
  bmpdec: support for rgb444 with bitfields compression
  rgb2rgb: allow conversion for <15 bpp
  doc: fix stray reference to FFmpeg
  v4l2: use C99 struct initializer
  v4l2: poll the file descriptor
  ...

Conflicts:
	avconv.c
	libavcodec/aacsbr.c
	libavcodec/bethsoftvideo.c
	libavcodec/kmvc.c
	libavdevice/v4l2.c
	libavfilter/vsrc_buffer.c
	libswscale/swscale_unscaled.c
	libswscale/x86/input.asm
	tests/ref/acodec/alac
	tests/ref/acodec/pcm_s16be
	tests/ref/acodec/pcm_s24be
	tests/ref/acodec/pcm_s32be
	tests/ref/acodec/pcm_s8
	tests/ref/lavf/mov
	tests/ref/vsynth1/dnxhd_1080i
	tests/ref/vsynth1/mpeg4
	tests/ref/vsynth1/qtrle
	tests/ref/vsynth1/svq1
	tests/ref/vsynth2/dnxhd_1080i
	tests/ref/vsynth2/mpeg4
	tests/ref/vsynth2/qtrle
	tests/ref/vsynth2/svq1

Merged-by: Michael Niedermayer <michaelni@gmx.at>
Michael Niedermayer 2012-01-11 02:23:55 +01:00
commit 0e7fc3cafe
36 changed files with 373 additions and 205 deletions


@ -638,7 +638,6 @@ static int configure_video_filters(InputStream *ist, OutputStream *ost)
if ((ret = avfilter_graph_parse(ost->graph, ost->avfilter, &inputs, &outputs, NULL)) < 0)
return ret;
av_freep(&ost->avfilter);
} else {
if ((ret = avfilter_link(last_filter, 0, ost->output_video_filter, 0)) < 0)
return ret;
@ -767,6 +766,10 @@ void exit_program(int ret)
bsfc = next;
}
output_streams[i].bitstream_filters = NULL;
#if CONFIG_AVFILTER
av_freep(&output_streams[i].avfilter);
#endif
}
for (i = 0; i < nb_input_files; i++) {
avformat_close_input(&input_files[i].ctx);
@ -1302,6 +1305,7 @@ static void do_subtitle_out(AVFormatContext *s,
static int bit_buffer_size = 1024 * 256;
static uint8_t *bit_buffer = NULL;
#if !CONFIG_AVFILTER
static void do_video_resample(OutputStream *ost,
InputStream *ist,
AVFrame *in_picture,
@ -1316,7 +1320,6 @@ static void do_video_resample(OutputStream *ost,
ost->resample_height != dec->height ||
ost->resample_pix_fmt != dec->pix_fmt;
#if !CONFIG_AVFILTER
if (resample_changed) {
av_log(NULL, AV_LOG_INFO,
"Input stream #%d:%d frame changed from size:%dx%d fmt:%s to size:%dx%d fmt:%s\n",
@ -1332,6 +1335,7 @@ static void do_video_resample(OutputStream *ost,
dec->height != enc->height ||
dec->pix_fmt != enc->pix_fmt;
if (ost->video_resample) {
*out_picture = &ost->resample_frame;
if (!ost->img_resample_ctx || resample_changed) {
@ -1357,21 +1361,13 @@ static void do_video_resample(OutputStream *ost,
sws_scale(ost->img_resample_ctx, in_picture->data, in_picture->linesize,
0, ost->resample_height, (*out_picture)->data, (*out_picture)->linesize);
}
#else
if (resample_changed) {
avfilter_graph_free(&ost->graph);
if (configure_video_filters(ist, ost)) {
av_log(NULL, AV_LOG_FATAL, "Error reinitializing filters!\n");
exit_program(1);
}
}
#endif
if (resample_changed) {
ost->resample_width = dec->width;
ost->resample_height = dec->height;
ost->resample_pix_fmt = dec->pix_fmt;
}
}
#endif
static void do_video_out(AVFormatContext *s,
@ -1426,7 +1422,11 @@ static void do_video_out(AVFormatContext *s,
if (nb_frames <= 0)
return;
#if !CONFIG_AVFILTER
do_video_resample(ost, ist, in_picture, &final_picture);
#else
final_picture = in_picture;
#endif
/* duplicates frame if needed */
for (i = 0; i < nb_frames; i++) {
@ -2012,12 +2012,33 @@ static int transcode_video(InputStream *ist, AVPacket *pkt, int *got_output, int
for (i = 0; i < nb_output_streams; i++) {
OutputStream *ost = &output_streams[i];
int frame_size;
int frame_size, resample_changed;
if (!check_output_constraints(ist, ost) || !ost->encoding_needed)
continue;
#if CONFIG_AVFILTER
resample_changed = ost->resample_width != decoded_frame->width ||
ost->resample_height != decoded_frame->height ||
ost->resample_pix_fmt != decoded_frame->format;
if (resample_changed) {
av_log(NULL, AV_LOG_INFO,
"Input stream #%d:%d frame changed from size:%dx%d fmt:%s to size:%dx%d fmt:%s\n",
ist->file_index, ist->st->index,
ost->resample_width, ost->resample_height, av_get_pix_fmt_name(ost->resample_pix_fmt),
decoded_frame->width, decoded_frame->height, av_get_pix_fmt_name(decoded_frame->format));
avfilter_graph_free(&ost->graph);
if (configure_video_filters(ist, ost)) {
av_log(NULL, AV_LOG_FATAL, "Error reinitializing filters!\n");
exit_program(1);
}
ost->resample_width = decoded_frame->width;
ost->resample_height = decoded_frame->height;
ost->resample_pix_fmt = decoded_frame->format;
}
if (!decoded_frame->sample_aspect_ratio.num)
decoded_frame->sample_aspect_ratio = ist->st->sample_aspect_ratio;
decoded_frame->pts = ist->pts;
@ -3736,13 +3757,13 @@ static OutputStream *new_video_stream(OptionsContext *o, AVFormatContext *oc)
ost->top_field_first = -1;
MATCH_PER_STREAM_OPT(top_field_first, i, ost->top_field_first, oc, st);
MATCH_PER_STREAM_OPT(copy_initial_nonkeyframes, i, ost->copy_initial_nonkeyframes, oc ,st);
#if CONFIG_AVFILTER
MATCH_PER_STREAM_OPT(filters, str, filters, oc, st);
if (filters)
ost->avfilter = av_strdup(filters);
#endif
} else {
MATCH_PER_STREAM_OPT(copy_initial_nonkeyframes, i, ost->copy_initial_nonkeyframes, oc ,st);
}
return ost;


@ -1092,7 +1092,7 @@ drawbox=10:20:200:60:red@@0.5"
Draw text string or text from specified file on top of video using the
libfreetype library.
To enable compilation of this filter you need to configure FFmpeg with
To enable compilation of this filter you need to configure Libav with
@code{--enable-libfreetype}.
The filter also recognizes strftime() sequences in the provided text


@ -4042,13 +4042,13 @@ static OutputStream *new_video_stream(OptionsContext *o, AVFormatContext *oc)
ost->top_field_first = -1;
MATCH_PER_STREAM_OPT(top_field_first, i, ost->top_field_first, oc, st);
MATCH_PER_STREAM_OPT(copy_initial_nonkeyframes, i, ost->copy_initial_nonkeyframes, oc ,st);
#if CONFIG_AVFILTER
MATCH_PER_STREAM_OPT(filters, str, filters, oc, st);
if (filters)
ost->avfilter = av_strdup(filters);
#endif
} else {
MATCH_PER_STREAM_OPT(copy_initial_nonkeyframes, i, ost->copy_initial_nonkeyframes, oc ,st);
}
return ost;


@ -1185,14 +1185,15 @@ static void sbr_qmf_synthesis(DSPContext *dsp, FFTContext *mdct,
{
int i, n;
const float *sbr_qmf_window = div ? sbr_qmf_window_ds : sbr_qmf_window_us;
const int step = 128 >> div;
float *v;
for (i = 0; i < 32; i++) {
if (*v_off < 128 >> div) {
if (*v_off < step) {
int saved_samples = (1280 - 128) >> div;
memcpy(&v0[SBR_SYNTHESIS_BUF_SIZE - saved_samples], v0, saved_samples * sizeof(float));
*v_off = SBR_SYNTHESIS_BUF_SIZE - saved_samples - (128 >> div);
*v_off = SBR_SYNTHESIS_BUF_SIZE - saved_samples - step;
} else {
*v_off -= 128 >> div;
*v_off -= step;
}
v = v0 + *v_off;
if (div) {


@ -34,6 +34,7 @@
typedef struct BethsoftvidContext {
AVFrame frame;
GetByteContext g;
} BethsoftvidContext;
static av_cold int bethsoftvid_decode_init(AVCodecContext *avctx)
@ -47,19 +48,19 @@ static av_cold int bethsoftvid_decode_init(AVCodecContext *avctx)
return 0;
}
static int set_palette(AVFrame * frame, const uint8_t * palette_buffer, int buf_size)
static int set_palette(BethsoftvidContext *ctx)
{
uint32_t * palette = (uint32_t *)frame->data[1];
uint32_t *palette = (uint32_t *)ctx->frame.data[1];
int a;
if (buf_size < 256*3)
if (bytestream2_get_bytes_left(&ctx->g) < 256*3)
return AVERROR_INVALIDDATA;
for(a = 0; a < 256; a++){
palette[a] = 0xFF << 24 | AV_RB24(&palette_buffer[a * 3]) * 4;
palette[a] = 0xFFU << 24 | bytestream2_get_be24u(&ctx->g) * 4;
palette[a] |= palette[a] >> 6 & 0x30303;
}
frame->palette_has_changed = 1;
ctx->frame.palette_has_changed = 1;
return 256*3;
}
@ -67,8 +68,6 @@ static int bethsoftvid_decode_frame(AVCodecContext *avctx,
void *data, int *data_size,
AVPacket *avpkt)
{
const uint8_t *buf = avpkt->data;
int buf_size = avpkt->size;
BethsoftvidContext * vid = avctx->priv_data;
char block_type;
uint8_t * dst;
@ -82,29 +81,32 @@ static int bethsoftvid_decode_frame(AVCodecContext *avctx,
av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
return -1;
}
bytestream2_init(&vid->g, avpkt->data, avpkt->size);
dst = vid->frame.data[0];
frame_end = vid->frame.data[0] + vid->frame.linesize[0] * avctx->height;
switch(block_type = *buf++){
case PALETTE_BLOCK:
return set_palette(&vid->frame, buf, buf_size);
switch(block_type = bytestream2_get_byte(&vid->g)){
case PALETTE_BLOCK: {
return set_palette(vid);
}
case VIDEO_YOFF_P_FRAME:
yoffset = bytestream_get_le16(&buf);
yoffset = bytestream2_get_le16(&vid->g);
if(yoffset >= avctx->height)
return -1;
dst += vid->frame.linesize[0] * yoffset;
}
// main code
while((code = *buf++)){
while((code = bytestream2_get_byte(&vid->g))){
int length = code & 0x7f;
// copy any bytes starting at the current position, and ending at the frame width
while(length > remaining){
if(code < 0x80)
bytestream_get_buffer(&buf, dst, remaining);
bytestream2_get_buffer(&vid->g, dst, remaining);
else if(block_type == VIDEO_I_FRAME)
memset(dst, buf[0], remaining);
memset(dst, bytestream2_peek_byte(&vid->g), remaining);
length -= remaining; // decrement the number of bytes to be copied
dst += remaining + wrap_to_next_line; // skip over extra bytes at end of frame
remaining = avctx->width;
@ -114,9 +116,9 @@ static int bethsoftvid_decode_frame(AVCodecContext *avctx,
// copy any remaining bytes after / if line overflows
if(code < 0x80)
bytestream_get_buffer(&buf, dst, length);
bytestream2_get_buffer(&vid->g, dst, length);
else if(block_type == VIDEO_I_FRAME)
memset(dst, *buf++, length);
memset(dst, bytestream2_get_byte(&vid->g), length);
remaining -= length;
dst += length;
}
@ -125,7 +127,7 @@ static int bethsoftvid_decode_frame(AVCodecContext *avctx,
*data_size = sizeof(AVFrame);
*(AVFrame*)data = vid->frame;
return buf_size;
return avpkt->size;
}
static av_cold int bethsoftvid_decode_end(AVCodecContext *avctx)


@ -46,6 +46,7 @@ typedef struct KmvcContext {
uint32_t pal[256];
uint8_t *cur, *prev;
uint8_t *frm0, *frm1;
GetByteContext g;
} KmvcContext;
typedef struct BitBuf {
@ -55,23 +56,19 @@ typedef struct BitBuf {
#define BLK(data, x, y) data[(x) + (y) * 320]
#define kmvc_init_getbits(bb, src) bb.bits = 7; bb.bitbuf = *src++;
#define kmvc_init_getbits(bb, g) bb.bits = 7; bb.bitbuf = bytestream2_get_byte(g);
#define kmvc_getbit(bb, src, src_end, res) {\
#define kmvc_getbit(bb, g, res) {\
res = 0; \
if (bb.bitbuf & (1 << bb.bits)) res = 1; \
bb.bits--; \
if(bb.bits == -1) { \
if (src >= src_end) { \
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n"); \
return AVERROR_INVALIDDATA; \
} \
bb.bitbuf = *src++; \
bb.bitbuf = bytestream2_get_byte(g); \
bb.bits = 7; \
} \
}
static int kmvc_decode_intra_8x8(KmvcContext * ctx, const uint8_t * src, int src_size, int w, int h)
static int kmvc_decode_intra_8x8(KmvcContext * ctx, int w, int h)
{
BitBuf bb;
int res, val;
@ -79,42 +76,33 @@ static int kmvc_decode_intra_8x8(KmvcContext * ctx, const uint8_t * src, int src
int bx, by;
int l0x, l1x, l0y, l1y;
int mx, my;
const uint8_t *src_end = src + src_size;
kmvc_init_getbits(bb, src);
kmvc_init_getbits(bb, &ctx->g);
for (by = 0; by < h; by += 8)
for (bx = 0; bx < w; bx += 8) {
kmvc_getbit(bb, src, src_end, res);
if (!bytestream2_get_bytes_left(&ctx->g)) {
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n");
return AVERROR_INVALIDDATA;
}
kmvc_getbit(bb, &ctx->g, res);
if (!res) { // fill whole 8x8 block
if (src >= src_end) {
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n");
return AVERROR_INVALIDDATA;
}
val = *src++;
val = bytestream2_get_byte(&ctx->g);
for (i = 0; i < 64; i++)
BLK(ctx->cur, bx + (i & 0x7), by + (i >> 3)) = val;
} else { // handle four 4x4 subblocks
for (i = 0; i < 4; i++) {
l0x = bx + (i & 1) * 4;
l0y = by + (i & 2) * 2;
kmvc_getbit(bb, src, src_end, res);
kmvc_getbit(bb, &ctx->g, res);
if (!res) {
kmvc_getbit(bb, src, src_end, res);
kmvc_getbit(bb, &ctx->g, res);
if (!res) { // fill whole 4x4 block
if (src >= src_end) {
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n");
return AVERROR_INVALIDDATA;
}
val = *src++;
val = bytestream2_get_byte(&ctx->g);
for (j = 0; j < 16; j++)
BLK(ctx->cur, l0x + (j & 3), l0y + (j >> 2)) = val;
} else { // copy block from already decoded place
if (src >= src_end) {
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n");
return AVERROR_INVALIDDATA;
}
val = *src++;
val = bytestream2_get_byte(&ctx->g);
mx = val & 0xF;
my = val >> 4;
for (j = 0; j < 16; j++)
@ -125,25 +113,17 @@ static int kmvc_decode_intra_8x8(KmvcContext * ctx, const uint8_t * src, int src
for (j = 0; j < 4; j++) {
l1x = l0x + (j & 1) * 2;
l1y = l0y + (j & 2);
kmvc_getbit(bb, src, src_end, res);
kmvc_getbit(bb, &ctx->g, res);
if (!res) {
kmvc_getbit(bb, src, src_end, res);
kmvc_getbit(bb, &ctx->g, res);
if (!res) { // fill whole 2x2 block
if (src >= src_end) {
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n");
return AVERROR_INVALIDDATA;
}
val = *src++;
val = bytestream2_get_byte(&ctx->g);
BLK(ctx->cur, l1x, l1y) = val;
BLK(ctx->cur, l1x + 1, l1y) = val;
BLK(ctx->cur, l1x, l1y + 1) = val;
BLK(ctx->cur, l1x + 1, l1y + 1) = val;
} else { // copy block from already decoded place
if (src >= src_end) {
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n");
return AVERROR_INVALIDDATA;
}
val = *src++;
val = bytestream2_get_byte(&ctx->g);
mx = val & 0xF;
my = val >> 4;
BLK(ctx->cur, l1x, l1y) = BLK(ctx->cur, l1x - mx, l1y - my);
@ -155,10 +135,10 @@ static int kmvc_decode_intra_8x8(KmvcContext * ctx, const uint8_t * src, int src
BLK(ctx->cur, l1x + 1 - mx, l1y + 1 - my);
}
} else { // read values for block
BLK(ctx->cur, l1x, l1y) = *src++;
BLK(ctx->cur, l1x + 1, l1y) = *src++;
BLK(ctx->cur, l1x, l1y + 1) = *src++;
BLK(ctx->cur, l1x + 1, l1y + 1) = *src++;
BLK(ctx->cur, l1x, l1y) = bytestream2_get_byte(&ctx->g);
BLK(ctx->cur, l1x + 1, l1y) = bytestream2_get_byte(&ctx->g);
BLK(ctx->cur, l1x, l1y + 1) = bytestream2_get_byte(&ctx->g);
BLK(ctx->cur, l1x + 1, l1y + 1) = bytestream2_get_byte(&ctx->g);
}
}
}
@ -169,7 +149,7 @@ static int kmvc_decode_intra_8x8(KmvcContext * ctx, const uint8_t * src, int src
return 0;
}
static int kmvc_decode_inter_8x8(KmvcContext * ctx, const uint8_t * src, int src_size, int w, int h)
static int kmvc_decode_inter_8x8(KmvcContext * ctx, int w, int h)
{
BitBuf bb;
int res, val;
@ -177,21 +157,20 @@ static int kmvc_decode_inter_8x8(KmvcContext * ctx, const uint8_t * src, int src
int bx, by;
int l0x, l1x, l0y, l1y;
int mx, my;
const uint8_t *src_end = src + src_size;
kmvc_init_getbits(bb, src);
kmvc_init_getbits(bb, &ctx->g);
for (by = 0; by < h; by += 8)
for (bx = 0; bx < w; bx += 8) {
kmvc_getbit(bb, src, src_end, res);
kmvc_getbit(bb, &ctx->g, res);
if (!res) {
kmvc_getbit(bb, src, src_end, res);
kmvc_getbit(bb, &ctx->g, res);
if (!res) { // fill whole 8x8 block
if (src >= src_end) {
if (!bytestream2_get_bytes_left(&ctx->g)) {
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n");
return AVERROR_INVALIDDATA;
}
val = *src++;
val = bytestream2_get_byte(&ctx->g);
for (i = 0; i < 64; i++)
BLK(ctx->cur, bx + (i & 0x7), by + (i >> 3)) = val;
} else { // copy block from previous frame
@ -200,26 +179,22 @@ static int kmvc_decode_inter_8x8(KmvcContext * ctx, const uint8_t * src, int src
BLK(ctx->prev, bx + (i & 0x7), by + (i >> 3));
}
} else { // handle four 4x4 subblocks
if (!bytestream2_get_bytes_left(&ctx->g)) {
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n");
return AVERROR_INVALIDDATA;
}
for (i = 0; i < 4; i++) {
l0x = bx + (i & 1) * 4;
l0y = by + (i & 2) * 2;
kmvc_getbit(bb, src, src_end, res);
kmvc_getbit(bb, &ctx->g, res);
if (!res) {
kmvc_getbit(bb, src, src_end, res);
kmvc_getbit(bb, &ctx->g, res);
if (!res) { // fill whole 4x4 block
if (src >= src_end) {
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n");
return AVERROR_INVALIDDATA;
}
val = *src++;
val = bytestream2_get_byte(&ctx->g);
for (j = 0; j < 16; j++)
BLK(ctx->cur, l0x + (j & 3), l0y + (j >> 2)) = val;
} else { // copy block
if (src >= src_end) {
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n");
return AVERROR_INVALIDDATA;
}
val = *src++;
val = bytestream2_get_byte(&ctx->g);
mx = (val & 0xF) - 8;
my = (val >> 4) - 8;
for (j = 0; j < 16; j++)
@ -230,25 +205,17 @@ static int kmvc_decode_inter_8x8(KmvcContext * ctx, const uint8_t * src, int src
for (j = 0; j < 4; j++) {
l1x = l0x + (j & 1) * 2;
l1y = l0y + (j & 2);
kmvc_getbit(bb, src, src_end, res);
kmvc_getbit(bb, &ctx->g, res);
if (!res) {
kmvc_getbit(bb, src, src_end, res);
kmvc_getbit(bb, &ctx->g, res);
if (!res) { // fill whole 2x2 block
if (src >= src_end) {
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n");
return AVERROR_INVALIDDATA;
}
val = *src++;
val = bytestream2_get_byte(&ctx->g);
BLK(ctx->cur, l1x, l1y) = val;
BLK(ctx->cur, l1x + 1, l1y) = val;
BLK(ctx->cur, l1x, l1y + 1) = val;
BLK(ctx->cur, l1x + 1, l1y + 1) = val;
} else { // copy block
if (src >= src_end) {
av_log(ctx->avctx, AV_LOG_ERROR, "Data overrun\n");
return AVERROR_INVALIDDATA;
}
val = *src++;
val = bytestream2_get_byte(&ctx->g);
mx = (val & 0xF) - 8;
my = (val >> 4) - 8;
BLK(ctx->cur, l1x, l1y) = BLK(ctx->prev, l1x + mx, l1y + my);
@ -260,10 +227,10 @@ static int kmvc_decode_inter_8x8(KmvcContext * ctx, const uint8_t * src, int src
BLK(ctx->prev, l1x + 1 + mx, l1y + 1 + my);
}
} else { // read values for block
BLK(ctx->cur, l1x, l1y) = *src++;
BLK(ctx->cur, l1x + 1, l1y) = *src++;
BLK(ctx->cur, l1x, l1y + 1) = *src++;
BLK(ctx->cur, l1x + 1, l1y + 1) = *src++;
BLK(ctx->cur, l1x, l1y) = bytestream2_get_byte(&ctx->g);
BLK(ctx->cur, l1x + 1, l1y) = bytestream2_get_byte(&ctx->g);
BLK(ctx->cur, l1x, l1y + 1) = bytestream2_get_byte(&ctx->g);
BLK(ctx->cur, l1x + 1, l1y + 1) = bytestream2_get_byte(&ctx->g);
}
}
}
@ -276,8 +243,6 @@ static int kmvc_decode_inter_8x8(KmvcContext * ctx, const uint8_t * src, int src
static int decode_frame(AVCodecContext * avctx, void *data, int *data_size, AVPacket *avpkt)
{
const uint8_t *buf = avpkt->data;
int buf_size = avpkt->size;
KmvcContext *const ctx = avctx->priv_data;
uint8_t *out, *src;
int i;
@ -285,6 +250,7 @@ static int decode_frame(AVCodecContext * avctx, void *data, int *data_size, AVPa
int blocksize;
const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);
bytestream2_init(&ctx->g, avpkt->data, avpkt->size);
if (ctx->pic.data[0])
avctx->release_buffer(avctx, &ctx->pic);
@ -295,16 +261,16 @@ static int decode_frame(AVCodecContext * avctx, void *data, int *data_size, AVPa
return -1;
}
header = *buf++;
header = bytestream2_get_byte(&ctx->g);
/* blocksize 127 is really palette change event */
if (buf[0] == 127) {
buf += 3;
if (bytestream2_peek_byte(&ctx->g) == 127) {
bytestream2_skip(&ctx->g, 3);
for (i = 0; i < 127; i++) {
ctx->pal[i + (header & 0x81)] = 0xFF << 24 | AV_RB24(buf);
buf += 4;
ctx->pal[i + (header & 0x81)] = 0xFFU << 24 | bytestream2_get_be24(&ctx->g);
bytestream2_skip(&ctx->g, 1);
}
buf -= 127 * 4 + 3;
bytestream2_seek(&ctx->g, -127 * 4 - 3, SEEK_CUR);
}
if (header & KMVC_KEYFRAME) {
@ -319,7 +285,7 @@ static int decode_frame(AVCodecContext * avctx, void *data, int *data_size, AVPa
ctx->pic.palette_has_changed = 1;
// palette starts from index 1 and has 127 entries
for (i = 1; i <= ctx->palsize; i++) {
ctx->pal[i] = 0xFF << 24 | bytestream_get_be24(&buf);
ctx->pal[i] = 0xFFU << 24 | bytestream2_get_be24(&ctx->g);
}
}
@ -336,7 +302,7 @@ static int decode_frame(AVCodecContext * avctx, void *data, int *data_size, AVPa
/* make the palette available on the way out */
memcpy(ctx->pic.data[1], ctx->pal, 1024);
blocksize = *buf++;
blocksize = bytestream2_get_byte(&ctx->g);
if (blocksize != 8 && blocksize != 127) {
av_log(avctx, AV_LOG_ERROR, "Block size = %i\n", blocksize);
@ -349,10 +315,10 @@ static int decode_frame(AVCodecContext * avctx, void *data, int *data_size, AVPa
memcpy(ctx->cur, ctx->prev, 320 * 200);
break;
case 3:
kmvc_decode_intra_8x8(ctx, buf, buf_size, avctx->width, avctx->height);
kmvc_decode_intra_8x8(ctx, avctx->width, avctx->height);
break;
case 4:
kmvc_decode_inter_8x8(ctx, buf, buf_size, avctx->width, avctx->height);
kmvc_decode_inter_8x8(ctx, avctx->width, avctx->height);
break;
default:
av_log(avctx, AV_LOG_ERROR, "Unknown compression method %i\n", header & KMVC_METHOD);
@ -380,7 +346,7 @@ static int decode_frame(AVCodecContext * avctx, void *data, int *data_size, AVPa
*(AVFrame *) data = ctx->pic;
/* always report that the buffer was completely consumed */
return buf_size;
return avpkt->size;
}


@ -1135,25 +1135,26 @@ int MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx)
s->codec_id == CODEC_ID_SVQ3);
/* mark & release old frames */
if (s->pict_type != AV_PICTURE_TYPE_B && s->last_picture_ptr &&
s->last_picture_ptr != s->next_picture_ptr &&
s->last_picture_ptr->f.data[0]) {
if (s->out_format != FMT_H264 || s->codec_id == CODEC_ID_SVQ3) {
if (s->out_format != FMT_H264 || s->codec_id == CODEC_ID_SVQ3) {
if (s->pict_type != AV_PICTURE_TYPE_B && s->last_picture_ptr &&
s->last_picture_ptr != s->next_picture_ptr &&
s->last_picture_ptr->f.data[0]) {
if (s->last_picture_ptr->owner2 == s)
free_frame_buffer(s, s->last_picture_ptr);
}
/* release forgotten pictures */
/* if (mpeg124/h263) */
if (!s->encoding) {
for (i = 0; i < s->picture_count; i++) {
if (s->picture[i].owner2 == s && s->picture[i].f.data[0] &&
&s->picture[i] != s->next_picture_ptr &&
s->picture[i].f.reference) {
if (!(avctx->active_thread_type & FF_THREAD_FRAME))
av_log(avctx, AV_LOG_ERROR,
"releasing zombie picture\n");
free_frame_buffer(s, &s->picture[i]);
}
/* release forgotten pictures */
/* if (mpeg124/h263) */
if (!s->encoding) {
for (i = 0; i < s->picture_count; i++) {
if (s->picture[i].owner2 == s && s->picture[i].f.data[0] &&
&s->picture[i] != s->last_picture_ptr &&
&s->picture[i] != s->next_picture_ptr &&
s->picture[i].f.reference) {
if (!(avctx->active_thread_type & FF_THREAD_FRAME))
av_log(avctx, AV_LOG_ERROR,
"releasing zombie picture\n");
free_frame_buffer(s, &s->picture[i]);
}
}
}
@ -1274,6 +1275,14 @@ int MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx)
if (s->next_picture_ptr)
ff_copy_picture(&s->next_picture, s->next_picture_ptr);
if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_FRAME) &&
(s->out_format != FMT_H264 || s->codec_id == CODEC_ID_SVQ3)) {
if (s->next_picture_ptr)
s->next_picture_ptr->owner2 = s;
if (s->last_picture_ptr)
s->last_picture_ptr->owner2 = s;
}
assert(s->pict_type == AV_PICTURE_TYPE_I || (s->last_picture_ptr &&
s->last_picture_ptr->f.data[0]));


@ -279,8 +279,10 @@ AVCodec ff_rv30_decoder = {
.init = rv30_decode_init,
.close = ff_rv34_decode_end,
.decode = ff_rv34_decode_frame,
.capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
.capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_FRAME_THREADS,
.flush = ff_mpeg_flush,
.long_name = NULL_IF_CONFIG_SMALL("RealVideo 3.0"),
.pix_fmts = ff_pixfmt_list_420,
.init_thread_copy = ONLY_IF_THREADS_ENABLED(ff_rv34_decode_init_thread_copy),
.update_thread_context = ONLY_IF_THREADS_ENABLED(ff_rv34_decode_update_thread_context),
};


@ -24,12 +24,16 @@
* RV30/40 decoder common data
*/
#include "libavutil/internal.h"
#include "avcodec.h"
#include "dsputil.h"
#include "mpegvideo.h"
#include "golomb.h"
#include "internal.h"
#include "mathops.h"
#include "rectangle.h"
#include "thread.h"
#include "rv34vlc.h"
#include "rv34data.h"
@ -669,6 +673,14 @@ static inline void rv34_mc(RV34DecContext *r, const int block_type,
if(uvmx == 6 && uvmy == 6)
uvmx = uvmy = 4;
}
if (HAVE_THREADS && (s->avctx->active_thread_type & FF_THREAD_FRAME)) {
/* wait for the referenced mb row to be finished */
int mb_row = FFMIN(s->mb_height - 1, s->mb_y + ((yoff + my + 21) >> 4));
AVFrame *f = dir ? &s->next_picture_ptr->f : &s->last_picture_ptr->f;
ff_thread_await_progress(f, mb_row, 0);
}
dxy = ly*4 + lx;
srcY = dir ? s->next_picture_ptr->f.data[0] : s->last_picture_ptr->f.data[0];
srcU = dir ? s->next_picture_ptr->f.data[1] : s->last_picture_ptr->f.data[1];
@ -824,6 +836,10 @@ static int rv34_decode_mv(RV34DecContext *r, int block_type)
}
case RV34_MB_B_DIRECT:
//surprisingly, it uses motion scheme from next reference frame
/* wait for the current mb row to be finished */
if (HAVE_THREADS && (s->avctx->active_thread_type & FF_THREAD_FRAME))
ff_thread_await_progress(&s->next_picture_ptr->f, s->mb_y - 1, 0);
next_bt = s->next_picture_ptr->f.mb_type[s->mb_x + s->mb_y * s->mb_stride];
if(IS_INTRA(next_bt) || IS_SKIP(next_bt)){
ZERO8x2(s->current_picture_ptr->f.motion_val[0][s->mb_x * 2 + s->mb_y * 2 * s->b8_stride], s->b8_stride);
@ -1186,7 +1202,7 @@ static int check_slice_end(RV34DecContext *r, MpegEncContext *s)
return 1;
if(r->s.mb_skip_run > 1)
return 0;
bits = r->bits - get_bits_count(&s->gb);
bits = get_bits_left(&s->gb);
if(bits < 0 || (bits < 8 && !show_bits(&s->gb, bits)))
return 1;
return 0;
@ -1255,6 +1271,7 @@ static int rv34_decode_slice(RV34DecContext *r, int end, const uint8_t* buf, int
}
}
s->mb_x = s->mb_y = 0;
ff_thread_finish_setup(s->avctx);
} else {
int slice_type = r->si.type ? r->si.type : AV_PICTURE_TYPE_I;
@ -1270,7 +1287,6 @@ static int rv34_decode_slice(RV34DecContext *r, int end, const uint8_t* buf, int
r->si.end = end;
s->qscale = r->si.quant;
r->bits = buf_size*8;
s->mb_num_left = r->si.end - r->si.start;
r->s.mb_skip_run = 0;
@ -1304,6 +1320,11 @@ static int rv34_decode_slice(RV34DecContext *r, int end, const uint8_t* buf, int
if(r->loop_filter && s->mb_y >= 2)
r->loop_filter(r, s->mb_y - 2);
if (HAVE_THREADS && (s->avctx->active_thread_type & FF_THREAD_FRAME))
ff_thread_report_progress(&s->current_picture_ptr->f,
s->mb_y - 2, 0);
}
if(s->mb_x == s->resync_mb_x)
s->first_slice_line=0;
@ -1369,6 +1390,71 @@ av_cold int ff_rv34_decode_init(AVCodecContext *avctx)
return 0;
}
int ff_rv34_decode_init_thread_copy(AVCodecContext *avctx)
{
RV34DecContext *r = avctx->priv_data;
r->s.avctx = avctx;
if (avctx->internal->is_copy) {
r->cbp_chroma = av_malloc(r->s.mb_stride * r->s.mb_height *
sizeof(*r->cbp_chroma));
r->cbp_luma = av_malloc(r->s.mb_stride * r->s.mb_height *
sizeof(*r->cbp_luma));
r->deblock_coefs = av_malloc(r->s.mb_stride * r->s.mb_height *
sizeof(*r->deblock_coefs));
r->intra_types_hist = av_malloc(r->intra_types_stride * 4 * 2 *
sizeof(*r->intra_types_hist));
r->mb_type = av_malloc(r->s.mb_stride * r->s.mb_height *
sizeof(*r->mb_type));
if (!(r->cbp_chroma && r->cbp_luma && r->deblock_coefs &&
r->intra_types_hist && r->mb_type)) {
av_freep(&r->cbp_chroma);
av_freep(&r->cbp_luma);
av_freep(&r->deblock_coefs);
av_freep(&r->intra_types_hist);
av_freep(&r->mb_type);
r->intra_types = NULL;
return AVERROR(ENOMEM);
}
r->intra_types = r->intra_types_hist + r->intra_types_stride * 4;
r->tmp_b_block_base = NULL;
memset(r->mb_type, 0, r->s.mb_stride * r->s.mb_height *
sizeof(*r->mb_type));
MPV_common_init(&r->s);
}
return 0;
}
int ff_rv34_decode_update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
{
RV34DecContext *r = dst->priv_data, *r1 = src->priv_data;
MpegEncContext * const s = &r->s, * const s1 = &r1->s;
int err;
if (dst == src || !s1->context_initialized)
return 0;
if ((err = ff_mpeg_update_thread_context(dst, src)))
return err;
r->cur_pts = r1->cur_pts;
r->last_pts = r1->last_pts;
r->next_pts = r1->next_pts;
memset(&r->si, 0, sizeof(r->si));
/* necessary since it is it the condition checked for in decode_slice
* to call MPV_frame_start. cmp. comment at the end of decode_frame */
s->current_picture_ptr = NULL;
return 0;
}
static int get_slice_offset(AVCodecContext *avctx, const uint8_t *buf, int n)
{
if(avctx->slice_count) return avctx->slice_offset[n];
@ -1470,6 +1556,9 @@ int ff_rv34_decode_frame(AVCodecContext *avctx,
if(last && s->current_picture_ptr){
if(r->loop_filter)
r->loop_filter(r, s->mb_height - 1);
if (HAVE_THREADS && (s->avctx->active_thread_type & FF_THREAD_FRAME))
ff_thread_report_progress(&s->current_picture_ptr->f,
s->mb_height - 1, 0);
ff_er_frame_end(s);
MPV_frame_end(s);
if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {


@ -92,7 +92,6 @@ typedef struct RV34DecContext{
const uint8_t *luma_dc_quant_p;///< luma subblock DC quantizer for interframes
RV34VLC *cur_vlcs; ///< VLC set used for current frame decoding
int bits; ///< slice size in bits
H264PredContext h; ///< functions for 4x4 and 16x16 intra block prediction
SliceInfo si; ///< current slice information
@ -134,5 +133,7 @@ int ff_rv34_get_start_offset(GetBitContext *gb, int blocks);
int ff_rv34_decode_init(AVCodecContext *avctx);
int ff_rv34_decode_frame(AVCodecContext *avctx, void *data, int *data_size, AVPacket *avpkt);
int ff_rv34_decode_end(AVCodecContext *avctx);
int ff_rv34_decode_init_thread_copy(AVCodecContext *avctx);
int ff_rv34_decode_update_thread_context(AVCodecContext *dst, const AVCodecContext *src);
#endif /* AVCODEC_RV34_H */


@ -566,8 +566,10 @@ AVCodec ff_rv40_decoder = {
.init = rv40_decode_init,
.close = ff_rv34_decode_end,
.decode = ff_rv34_decode_frame,
.capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
.capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_FRAME_THREADS,
.flush = ff_mpeg_flush,
.long_name = NULL_IF_CONFIG_SMALL("RealVideo 4.0"),
.pix_fmts = ff_pixfmt_list_420,
.init_thread_copy = ONLY_IF_THREADS_ENABLED(ff_rv34_decode_init_thread_copy),
.update_thread_context = ONLY_IF_THREADS_ENABLED(ff_rv34_decode_update_thread_context),
};


@ -38,6 +38,7 @@
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <poll.h>
#if HAVE_SYS_VIDEOIO_H
#include <sys/videoio.h>
#else
@ -54,6 +55,7 @@
#include "libavutil/parseutils.h"
#include "libavutil/pixdesc.h"
#include "libavutil/avstring.h"
#include "libavutil/mathematics.h"
#if CONFIG_LIBV4L2
#include <libv4l2.h>
@ -79,6 +81,7 @@ struct video_data {
int frame_format; /* V4L2_PIX_FMT_* */
int width, height;
int frame_size;
int timeout;
int interlaced;
int top_field_first;
@ -197,14 +200,11 @@ static int device_init(AVFormatContext *ctx, int *width, int *height,
{
struct video_data *s = ctx->priv_data;
int fd = s->fd;
struct v4l2_format fmt;
struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
struct v4l2_pix_format *pix = &fmt.fmt.pix;
int res;
memset(&fmt, 0, sizeof(struct v4l2_format));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
pix->width = *width;
pix->height = *height;
pix->pixelformat = pix_fmt;
@ -362,13 +362,14 @@ static void list_formats(AVFormatContext *ctx, int fd, int type)
static int mmap_init(AVFormatContext *ctx)
{
struct video_data *s = ctx->priv_data;
struct v4l2_requestbuffers req = {0};
int i, res;
struct video_data *s = ctx->priv_data;
struct v4l2_requestbuffers req = {
.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
.count = desired_video_buffers,
.memory = V4L2_MEMORY_MMAP
};
req.count = desired_video_buffers;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
res = v4l2_ioctl(s->fd, VIDIOC_REQBUFS, &req);
if (res < 0) {
if (errno == EINVAL) {
@ -397,11 +398,11 @@ static int mmap_init(AVFormatContext *ctx)
}
for (i = 0; i < req.count; i++) {
struct v4l2_buffer buf = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
struct v4l2_buffer buf = {
.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
.index = i,
.memory = V4L2_MEMORY_MMAP
};
res = v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf);
if (res < 0) {
av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF)\n");
@ -431,7 +432,7 @@ static int mmap_init(AVFormatContext *ctx)
static void mmap_release_buffer(AVPacket *pkt)
{
struct v4l2_buffer buf = {0};
struct v4l2_buffer buf = { 0 };
int res, fd;
struct buff_data *buf_descriptor = pkt->priv;
@ -456,12 +457,20 @@ static void mmap_release_buffer(AVPacket *pkt)
static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
{
struct video_data *s = ctx->priv_data;
struct v4l2_buffer buf = {0};
struct v4l2_buffer buf = {
.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
.memory = V4L2_MEMORY_MMAP
};
struct buff_data *buf_descriptor;
struct pollfd p = { .fd = s->fd, .events = POLLIN };
int res;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
res = poll(&p, 1, s->timeout);
if (res < 0)
return AVERROR(errno);
if (!(p.revents & (POLLIN | POLLERR | POLLHUP)))
return AVERROR(EAGAIN);
/* FIXME: Some special treatment might be needed in case of loss of signal... */
while ((res = v4l2_ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
@ -513,11 +522,11 @@ static int mmap_start(AVFormatContext *ctx)
int i, res;
for (i = 0; i < s->buffers; i++) {
struct v4l2_buffer buf = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
struct v4l2_buffer buf = {
.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
.index = i,
.memory = V4L2_MEMORY_MMAP
};
res = v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf);
if (res < 0) {
@ -560,12 +569,12 @@ static void mmap_close(struct video_data *s)
static int v4l2_set_parameters(AVFormatContext *s1, AVFormatParameters *ap)
{
struct video_data *s = s1->priv_data;
struct v4l2_input input = {0};
struct v4l2_standard standard = {0};
struct v4l2_streamparm streamparm = {0};
struct v4l2_input input = { 0 };
struct v4l2_standard standard = { 0 };
struct v4l2_streamparm streamparm = { 0 };
struct v4l2_fract *tpf = &streamparm.parm.capture.timeperframe;
AVRational framerate_q = { 0 };
int i, ret;
AVRational framerate_q={0};
streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
@ -596,7 +605,7 @@ static int v4l2_set_parameters(AVFormatContext *s1, AVFormatParameters *ap)
av_log(s1, AV_LOG_DEBUG, "The V4L2 driver set standard: %s\n",
s->standard);
/* set tv standard */
for (i = 0;; i++) {
for(i=0;;i++) {
standard.index = i;
ret = v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard);
if (ret < 0 || !av_strcasecmp(standard.name, s->standard))
@ -649,6 +658,10 @@ static int v4l2_set_parameters(AVFormatContext *s1, AVFormatParameters *ap)
s1->streams[0]->codec->time_base.den = tpf->denominator;
s1->streams[0]->codec->time_base.num = tpf->numerator;
s->timeout = 100 +
av_rescale_q(1, s1->streams[0]->codec->time_base,
(AVRational){1, 1000});
return 0;
}
@ -722,11 +735,15 @@ static int v4l2_read_header(AVFormatContext *s1, AVFormatParameters *ap)
}
if (s->pixel_format) {
AVCodec *codec = avcodec_find_decoder_by_name(s->pixel_format);
if (codec)
s1->video_codec_id = codec->id;
pix_fmt = av_get_pix_fmt(s->pixel_format);
if (pix_fmt == PIX_FMT_NONE) {
av_log(s1, AV_LOG_ERROR, "No such pixel format: %s.\n",
if (pix_fmt == PIX_FMT_NONE && !codec) {
av_log(s1, AV_LOG_ERROR, "No such input format: %s.\n",
s->pixel_format);
res = AVERROR(EINVAL);
@ -832,7 +849,8 @@ static const AVOption options[] = {
{ "standard", "TV standard, used only by analog frame grabber", OFFSET(standard), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC },
{ "channel", "TV channel, used only by frame grabber", OFFSET(channel), AV_OPT_TYPE_INT, {.dbl = 0 }, 0, INT_MAX, DEC },
{ "video_size", "A string describing frame size, such as 640x480 or hd720.", OFFSET(video_size), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
{ "pixel_format", "", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
{ "pixel_format", "Preferred pixel format", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
{ "input_format", "Preferred pixel format (for raw video) or codec name", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
{ "framerate", "", OFFSET(framerate), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
{ "list_formats", "List available formats and exit", OFFSET(list_format), AV_OPT_TYPE_INT, {.dbl = 0 }, 0, INT_MAX, DEC, "list_formats" },
{ "all", "Show all available formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.dbl = V4L_ALLFORMATS }, 0, INT_MAX, DEC, "list_formats" },


@ -39,6 +39,12 @@ typedef struct {
char sws_param[256];
} BufferSourceContext;
#define CHECK_PARAM_CHANGE(s, c, width, height, format)\
if (c->w != width || c->h != height || c->pix_fmt != format) {\
av_log(s, AV_LOG_ERROR, "Changing frame properties on the fly is not supported.\n");\
return AVERROR(EINVAL);\
}
int av_vsrc_buffer_add_video_buffer_ref(AVFilterContext *buffer_filter,
AVFilterBufferRef *picref, int flags)
{
@ -125,6 +131,8 @@ int av_buffersrc_buffer(AVFilterContext *s, AVFilterBufferRef *buf)
return AVERROR(EINVAL);
}
// CHECK_PARAM_CHANGE(s, c, buf->video->w, buf->video->h, buf->format);
c->picref = buf;
return 0;


@ -144,7 +144,7 @@ static int load_ipmovie_packet(IPMVEContext *s, AVIOContext *pb,
(s->audio_chunk_size / s->audio_channels / (s->audio_bits / 8));
else
s->audio_frame_count +=
(s->audio_chunk_size - 6) / s->audio_channels;
(s->audio_chunk_size - 6 - s->audio_channels) / s->audio_channels;
av_dlog(NULL, "sending audio frame with pts %"PRId64" (%d audio frames)\n",
pkt->pts, s->audio_frame_count);


@ -2570,7 +2570,8 @@ static int mov_write_header(AVFormatContext *s)
#endif
if (t = av_dict_get(s->metadata, "creation_time", NULL, 0))
mov->time = ff_iso8601_to_unix_time(t->value);
mov->time += 0x7C25B080; //1970 based -> 1904 based
if (mov->time)
mov->time += 0x7C25B080; // 1970 based -> 1904 based
if (mov->chapter_track)
mov_create_chapter_track(s, mov->chapter_track);


@ -207,7 +207,7 @@ static int vmd_read_header(AVFormatContext *s,
vmd->frame_table[total_frames].pts = current_audio_pts;
total_frames++;
if(!current_audio_pts)
current_audio_pts += sound_buffers;
current_audio_pts += sound_buffers - 1;
else
current_audio_pts++;
break;


@ -135,7 +135,7 @@ cglobal %2ToY, 5, 5, %1, dst, unused0, unused1, src, w
%macro YUYV_TO_UV_FN 2-3
cglobal %2ToUV, 4, 5, %1, dstU, dstV, unused, src, w
%ifdef ARCH_X86_64
movsxd wq, r5m
movsxd wq, dword r5m
%else ; x86-32
mov wq, r5m
%endif
@ -190,7 +190,7 @@ cglobal %2ToUV, 4, 5, %1, dstU, dstV, unused, src, w
%macro NVXX_TO_UV_FN 2
cglobal %2ToUV, 4, 5, %1, dstU, dstV, unused, src, w
%ifdef ARCH_X86_64
movsxd wq, r5m
movsxd wq, dword r5m
%else ; x86-32
mov wq, r5m
%endif


@ -1584,12 +1584,24 @@ static void RENAME(hyscale_fast)(SwsContext *c, int16_t *dst,
void *mmx2FilterCode= c->lumMmx2FilterCode;
int i;
#if defined(PIC)
DECLARE_ALIGNED(8, uint64_t, ebxsave);
uint64_t ebxsave;
#endif
#if ARCH_X86_64
uint64_t retsave;
#endif
__asm__ volatile(
#if defined(PIC)
"mov %%"REG_b", %5 \n\t"
#if ARCH_X86_64
"mov -8(%%rsp), %%"REG_a" \n\t"
"mov %%"REG_a", %6 \n\t"
#endif
#else
#if ARCH_X86_64
"mov -8(%%rsp), %%"REG_a" \n\t"
"mov %%"REG_a", %5 \n\t"
#endif
#endif
"pxor %%mm7, %%mm7 \n\t"
"mov %0, %%"REG_c" \n\t"
@ -1631,11 +1643,23 @@ static void RENAME(hyscale_fast)(SwsContext *c, int16_t *dst,
#if defined(PIC)
"mov %5, %%"REG_b" \n\t"
#if ARCH_X86_64
"mov %6, %%"REG_a" \n\t"
"mov %%"REG_a", -8(%%rsp) \n\t"
#endif
#else
#if ARCH_X86_64
"mov %5, %%"REG_a" \n\t"
"mov %%"REG_a", -8(%%rsp) \n\t"
#endif
#endif
:: "m" (src), "m" (dst), "m" (filter), "m" (filterPos),
"m" (mmx2FilterCode)
#if defined(PIC)
,"m" (ebxsave)
#endif
#if ARCH_X86_64
,"m"(retsave)
#endif
: "%"REG_a, "%"REG_c, "%"REG_d, "%"REG_S, "%"REG_D
#if !defined(PIC)
@ -1658,10 +1682,22 @@ static void RENAME(hcscale_fast)(SwsContext *c, int16_t *dst1, int16_t *dst2,
#if defined(PIC)
DECLARE_ALIGNED(8, uint64_t, ebxsave);
#endif
#if ARCH_X86_64
DECLARE_ALIGNED(8, uint64_t, retsave);
#endif
__asm__ volatile(
#if defined(PIC)
"mov %%"REG_b", %7 \n\t"
#if ARCH_X86_64
"mov -8(%%rsp), %%"REG_a" \n\t"
"mov %%"REG_a", %8 \n\t"
#endif
#else
#if ARCH_X86_64
"mov -8(%%rsp), %%"REG_a" \n\t"
"mov %%"REG_a", %7 \n\t"
#endif
#endif
"pxor %%mm7, %%mm7 \n\t"
"mov %0, %%"REG_c" \n\t"
@ -1691,11 +1727,23 @@ static void RENAME(hcscale_fast)(SwsContext *c, int16_t *dst1, int16_t *dst2,
#if defined(PIC)
"mov %7, %%"REG_b" \n\t"
#if ARCH_X86_64
"mov %8, %%"REG_a" \n\t"
"mov %%"REG_a", -8(%%rsp) \n\t"
#endif
#else
#if ARCH_X86_64
"mov %7, %%"REG_a" \n\t"
"mov %%"REG_a", -8(%%rsp) \n\t"
#endif
#endif
:: "m" (src1), "m" (dst1), "m" (filter), "m" (filterPos),
"m" (mmx2FilterCode), "m" (src2), "m"(dst2)
#if defined(PIC)
,"m" (ebxsave)
#endif
#if ARCH_X86_64
,"m"(retsave)
#endif
: "%"REG_a, "%"REG_c, "%"REG_d, "%"REG_S, "%"REG_D
#if !defined(PIC)


@ -1,4 +1,4 @@
4fe333ff79e86cca6ba8c109cc08263e *./tests/data/acodec/alac.m4a
cf9a4b40ab945367cbb0e6cbb4cf37a1 *./tests/data/acodec/alac.m4a
389166 ./tests/data/acodec/alac.m4a
64151e4bcc2b717aa5a8454d424d6a1f *./tests/data/alac.acodec.out.wav
stddev: 0.00 PSNR:999.99 MAXDIFF: 0 bytes: 1058400/ 1058400


@ -1,4 +1,4 @@
d07e475322765c20b1fcdb822ad5dc04 *./tests/data/acodec/pcm_s16be.mov
4529713e8b744edb3e62c4e688a3f987 *./tests/data/acodec/pcm_s16be.mov
1059065 ./tests/data/acodec/pcm_s16be.mov
64151e4bcc2b717aa5a8454d424d6a1f *./tests/data/pcm_s16be.acodec.out.wav
stddev: 0.00 PSNR:999.99 MAXDIFF: 0 bytes: 1058400/ 1058400


@ -1,4 +1,4 @@
f66d9543a4e04346818e802c4f2d7a30 *./tests/data/acodec/pcm_s24be.mov
8a7b79317738f91a137c7f6519c3df72 *./tests/data/acodec/pcm_s24be.mov
1588265 ./tests/data/acodec/pcm_s24be.mov
64151e4bcc2b717aa5a8454d424d6a1f *./tests/data/pcm_s24be.acodec.out.wav
stddev: 0.00 PSNR:999.99 MAXDIFF: 0 bytes: 1058400/ 1058400


@ -1,4 +1,4 @@
09c919947211de14b3ad0e7603e5b44e *./tests/data/acodec/pcm_s32be.mov
c9f29c97890f69796faa227c3c08c3d8 *./tests/data/acodec/pcm_s32be.mov
2117465 ./tests/data/acodec/pcm_s32be.mov
64151e4bcc2b717aa5a8454d424d6a1f *./tests/data/pcm_s32be.acodec.out.wav
stddev: 0.00 PSNR:999.99 MAXDIFF: 0 bytes: 1058400/ 1058400


@ -1,4 +1,4 @@
52d8c65c4987227979785d5ac2030175 *./tests/data/acodec/pcm_s8.mov
bfd442109b661daf8ee7a468d9d6b944 *./tests/data/acodec/pcm_s8.mov
529865 ./tests/data/acodec/pcm_s8.mov
651d4eb8d98dfcdda96ae6c43d8f156b *./tests/data/pcm_s8.acodec.out.wav
stddev: 147.89 PSNR: 52.93 MAXDIFF: 255 bytes: 1058400/ 1058400


@ -1,3 +1,3 @@
3340b7ffe1b1d98a50622bd53f786d41 *./tests/data/lavf/lavf.mov
37c9c1272d9c524b44692e9192d82033 *./tests/data/lavf/lavf.mov
357857 ./tests/data/lavf/lavf.mov
./tests/data/lavf/lavf.mov CRC=0x2f6a9b26


@ -1,4 +1,4 @@
d2621659037803a30b77e3d4f87eb733 *./tests/data/vsynth1/dnxhd-1080i.mov
153afc6bd83611db4ba0d6d509fc3d74 *./tests/data/vsynth1/dnxhd-1080i.mov
3031895 ./tests/data/vsynth1/dnxhd-1080i.mov
0c651e840f860592f0d5b66030d9fa32 *./tests/data/dnxhd_1080i.vsynth1.out.yuv
stddev: 6.29 PSNR: 32.15 MAXDIFF: 64 bytes: 760320/ 7603200


@ -1,4 +1,4 @@
f32960be0f05be8b2ed03447e1eaea6f *./tests/data/vsynth1/odivx.mp4
6ea715a2de70af8d37128ce643e4cca0 *./tests/data/vsynth1/odivx.mp4
539996 ./tests/data/vsynth1/odivx.mp4
8828a375448dc5c2215163ba70656f89 *./tests/data/mpeg4.vsynth1.out.yuv
stddev: 7.97 PSNR: 30.10 MAXDIFF: 105 bytes: 7603200/ 7603200


@ -1,4 +1,4 @@
062e1f5ec6ebdbc43ee69e0393221866 *./tests/data/vsynth1/prores.mov
e8487134a5e4b8351de7ad386fb25e27 *./tests/data/vsynth1/prores.mov
5022795 ./tests/data/vsynth1/prores.mov
a2e2d1d45341a94ff994d1d92629f778 *./tests/data/prores.vsynth1.out.yuv
stddev: 2.47 PSNR: 40.27 MAXDIFF: 31 bytes: 7603200/ 7603200


@ -1,4 +1,4 @@
b67340783a817fe398fd138175c7fa07 *./tests/data/vsynth1/qtrle.mov
54a37ac94abdd95b9e5726a2211e8e1c *./tests/data/vsynth1/qtrle.mov
15263072 ./tests/data/vsynth1/qtrle.mov
93695a27c24a61105076ca7b1f010bbd *./tests/data/qtrle.vsynth1.out.yuv
stddev: 3.42 PSNR: 37.44 MAXDIFF: 48 bytes: 7603200/ 7603200


@ -1,4 +1,4 @@
f0c3cd26e80674a62683685d0508622a *./tests/data/vsynth1/qtrlegray.mov
7b04fbf0fa29600517d048882583128b *./tests/data/vsynth1/qtrlegray.mov
5113268 ./tests/data/vsynth1/qtrlegray.mov
29def4aed035ed65d3a89f7d382fccbe *./tests/data/qtrlegray.vsynth1.out.yuv
stddev: 25.95 PSNR: 19.85 MAXDIFF: 122 bytes: 7603200/ 7603200


@ -1,4 +1,4 @@
ebaf85e8743ad1f6c2228d473fe16d83 *./tests/data/vsynth1/svq1.mov
0658c40cfef3964c60bb9b05233727de *./tests/data/vsynth1/svq1.mov
1334207 ./tests/data/vsynth1/svq1.mov
9cc35c54b2c77d36bd7e308b393c1f81 *./tests/data/svq1.vsynth1.out.yuv
stddev: 9.58 PSNR: 28.50 MAXDIFF: 210 bytes: 7603200/ 7603200


@ -1,4 +1,4 @@
af3b282a2194a7e2748df7684330728a *./tests/data/vsynth2/dnxhd-1080i.mov
677588203650770792b63a66231bd127 *./tests/data/vsynth2/dnxhd-1080i.mov
3031895 ./tests/data/vsynth2/dnxhd-1080i.mov
3c559af629ae0a8fb1a9a0e4b4da7733 *./tests/data/dnxhd_1080i.vsynth2.out.yuv
stddev: 1.31 PSNR: 45.77 MAXDIFF: 23 bytes: 760320/ 7603200


@ -1,4 +1,4 @@
4d092ca067362a61b9c96f5f12a1ab5a *./tests/data/vsynth2/odivx.mp4
051ff85797971b9047723044eaec07be *./tests/data/vsynth2/odivx.mp4
119649 ./tests/data/vsynth2/odivx.mp4
90a3577850239083a9042bef33c50e85 *./tests/data/mpeg4.vsynth2.out.yuv
stddev: 5.34 PSNR: 33.57 MAXDIFF: 83 bytes: 7603200/ 7603200


@ -1,4 +1,4 @@
85f16a09120b668d02cb9cce86dfa9b1 *./tests/data/vsynth2/prores.mov
81988143e6169dbf735bd180bf5a8703 *./tests/data/vsynth2/prores.mov
2844034 ./tests/data/vsynth2/prores.mov
b5844025c0f4c7c37db702c3213db232 *./tests/data/prores.vsynth2.out.yuv
stddev: 1.31 PSNR: 45.77 MAXDIFF: 11 bytes: 7603200/ 7603200


@ -1,4 +1,4 @@
f18ed8321e19a7cadedc3ba32d71e0ea *./tests/data/vsynth2/qtrle.mov
04c58848958a0684eba8a816e00dba28 *./tests/data/vsynth2/qtrle.mov
14798259 ./tests/data/vsynth2/qtrle.mov
98d0e2854731472c5bf13d8638502d0a *./tests/data/qtrle.vsynth2.out.yuv
stddev: 1.26 PSNR: 46.10 MAXDIFF: 13 bytes: 7603200/ 7603200


@ -1,4 +1,4 @@
799451fc6215c51fc892a2284721b8e7 *./tests/data/vsynth2/qtrlegray.mov
b9769bd1ff7e3b0fb26dfc7555e789b4 *./tests/data/vsynth2/qtrlegray.mov
5111257 ./tests/data/vsynth2/qtrlegray.mov
f63b5ebdfdba750e547c25131b0a3fd1 *./tests/data/qtrlegray.vsynth2.out.yuv
stddev: 19.42 PSNR: 22.36 MAXDIFF: 72 bytes: 7603200/ 7603200


@ -1,4 +1,4 @@
8d5275bf59aac368d5da11d9c34b8cf2 *./tests/data/vsynth2/svq1.mov
320a7fbbaecc0989df054bf9678bfdf6 *./tests/data/vsynth2/svq1.mov
766691 ./tests/data/vsynth2/svq1.mov
aa03471dac3f49455a33a2b19fda1098 *./tests/data/svq1.vsynth2.out.yuv
stddev: 3.23 PSNR: 37.93 MAXDIFF: 61 bytes: 7603200/ 7603200