avconv: get output pixel format from lavfi.
This way we don't require a clearly defined corresponding input stream. The result for the xwd test changes because rgb24 is now chosen instead of bgra.
commit b7327887ea
parent 7af99a01c4
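For orientation, the sketch below models the selection scheme this patch introduces: avconv now hands the lavfi sink a list of acceptable output pixel formats (the user's forced format if any, otherwise the encoder's list, otherwise no constraint) and reads the negotiated format back from the filter graph, instead of resolving it from the input stream up front. The names here (Fmt, OutStream, choose_fmts, the example encoder list) are mock stand-ins for illustration only; the real implementation is choose_pixel_fmts() and the SinkContext changes in the diff below.

/* Standalone model of the new pixel-format selection; mock types only,
 * not the real libavutil/avconv identifiers. */
#include <stdio.h>

typedef enum { FMT_NONE = -1, FMT_YUV420P, FMT_RGB24, FMT_BGRA } Fmt;

typedef struct {
    Fmt        user_fmt;    /* format forced by the user, FMT_NONE if unset      */
    const Fmt *enc_fmts;    /* formats the encoder supports, FMT_NONE-terminated */
    Fmt        fmt_list[2]; /* scratch list used when the user forced one format */
} OutStream;

/* Mirrors choose_pixel_fmts() from the patch: prefer an explicit user choice,
 * else the encoder's list, else NULL meaning "anything lavfi can produce". */
static const Fmt *choose_fmts(OutStream *os)
{
    if (os->user_fmt != FMT_NONE) {
        os->fmt_list[0] = os->user_fmt;
        os->fmt_list[1] = FMT_NONE;
        return os->fmt_list;
    }
    return os->enc_fmts;    /* may be NULL */
}

int main(void)
{
    /* Hypothetical encoder format list, for illustration only. */
    static const Fmt enc_fmts[] = { FMT_RGB24, FMT_BGRA, FMT_NONE };
    OutStream os = { .user_fmt = FMT_NONE, .enc_fmts = enc_fmts };

    const Fmt *allowed = choose_fmts(&os);
    /* In avconv the sink filter advertises this list and lavfi negotiates the
     * actual format; here we simply take the first entry as a stand-in. */
    Fmt negotiated = allowed ? allowed[0] : FMT_YUV420P;
    printf("negotiated format id: %d\n", negotiated);
    return 0;
}

Because the format is now driven by the sink's list rather than by the input stream, the xwd FATE reference below changes: negotiation settles on rgb24 instead of bgra.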
avconv.c (79 changed lines)
@@ -252,6 +252,8 @@ typedef struct OutputStream {
     int stream_copy;
     const char *attachment_filename;
     int copy_initial_nonkeyframes;
+
+    enum PixelFormat pix_fmts[2];
 } OutputStream;
@@ -543,13 +545,24 @@ static void filter_release_buffer(AVFilterBuffer *fb)
     unref_buffer(buf->ist, buf);
 }
 
+static const enum PixelFormat *choose_pixel_fmts(OutputStream *ost)
+{
+    if (ost->st->codec->pix_fmt != PIX_FMT_NONE) {
+        ost->pix_fmts[0] = ost->st->codec->pix_fmt;
+        return ost->pix_fmts;
+    } else if (ost->enc->pix_fmts)
+        return ost->enc->pix_fmts;
+    else
+        return NULL;
+}
+
 static int configure_video_filters(InputStream *ist, OutputStream *ost)
 {
     AVFilterContext *last_filter, *filter;
     /** filter graph containing all filters including input & output */
     AVCodecContext *codec = ost->st->codec;
     AVCodecContext *icodec = ist->st->codec;
-    SinkContext sink_ctx = { .pix_fmt = codec->pix_fmt };
+    SinkContext sink_ctx = { .pix_fmts = choose_pixel_fmts(ost) };
     AVRational sample_aspect_ratio;
     char args[255];
     int ret;
@@ -621,6 +634,7 @@ static int configure_video_filters(InputStream *ist, OutputStream *ost)
         ost->frame_aspect_ratio ? // overridden by the -aspect cli option
         av_d2q(ost->frame_aspect_ratio * codec->height/codec->width, 255) :
         ost->output_video_filter->inputs[0]->sample_aspect_ratio;
+    codec->pix_fmt = ost->output_video_filter->inputs[0]->format;
 
     return 0;
 }
@@ -833,34 +847,6 @@ static void choose_sample_rate(AVStream *st, AVCodec *codec)
     }
 }
 
-static void choose_pixel_fmt(AVStream *st, AVCodec *codec)
-{
-    if (codec && codec->pix_fmts) {
-        const enum PixelFormat *p = codec->pix_fmts;
-        if (st->codec->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL) {
-            if (st->codec->codec_id == CODEC_ID_MJPEG) {
-                p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUV420P, PIX_FMT_YUV422P, PIX_FMT_NONE };
-            } else if (st->codec->codec_id == CODEC_ID_LJPEG) {
-                p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUVJ444P, PIX_FMT_YUV420P,
-                                                 PIX_FMT_YUV422P, PIX_FMT_YUV444P, PIX_FMT_BGRA, PIX_FMT_NONE };
-            }
-        }
-        for (; *p != PIX_FMT_NONE; p++) {
-            if (*p == st->codec->pix_fmt)
-                break;
-        }
-        if (*p == PIX_FMT_NONE) {
-            if (st->codec->pix_fmt != PIX_FMT_NONE)
-                av_log(NULL, AV_LOG_WARNING,
-                       "Incompatible pixel format '%s' for codec '%s', auto-selecting format '%s'\n",
-                       av_pix_fmt_descriptors[st->codec->pix_fmt].name,
-                       codec->name,
-                       av_pix_fmt_descriptors[codec->pix_fmts[0]].name);
-            st->codec->pix_fmt = codec->pix_fmts[0];
-        }
-    }
-}
-
 static double
 get_sync_ipts(const OutputStream *ost, int64_t pts)
 {
@@ -2401,31 +2387,11 @@ static int transcode_init(void)
                 ost->resample_channels = icodec->channels;
                 break;
             case AVMEDIA_TYPE_VIDEO:
-                if (codec->pix_fmt == PIX_FMT_NONE)
-                    codec->pix_fmt = icodec->pix_fmt;
-                choose_pixel_fmt(ost->st, ost->enc);
-
-                if (ost->st->codec->pix_fmt == PIX_FMT_NONE) {
-                    av_log(NULL, AV_LOG_FATAL, "Video pixel format is unknown, stream cannot be encoded\n");
-                    exit_program(1);
-                }
-
                 if (!codec->width || !codec->height) {
                     codec->width = icodec->width;
                     codec->height = icodec->height;
                 }
-
-                ost->video_resample = codec->width != icodec->width ||
-                                      codec->height != icodec->height ||
-                                      codec->pix_fmt != icodec->pix_fmt;
-                if (ost->video_resample) {
-                    codec->bits_per_raw_sample = 0;
-                }
-
-                ost->resample_height = icodec->height;
-                ost->resample_width = icodec->width;
-                ost->resample_pix_fmt = icodec->pix_fmt;
-
                 /*
                  * We want CFR output if and only if one of those is true:
                  * 1) user specified output framerate with -r
@@ -2455,6 +2421,18 @@ static int transcode_init(void)
                     av_log(NULL, AV_LOG_FATAL, "Error opening filters!\n");
                     exit(1);
                 }
+
+                ost->video_resample = codec->width != icodec->width ||
+                                      codec->height != icodec->height ||
+                                      codec->pix_fmt != icodec->pix_fmt;
+                if (ost->video_resample) {
+                    codec->bits_per_raw_sample = 0;
+                }
+
+                ost->resample_height = icodec->height;
+                ost->resample_width = icodec->width;
+                ost->resample_pix_fmt = icodec->pix_fmt;
+
                 break;
             case AVMEDIA_TYPE_SUBTITLE:
                 codec->time_base = (AVRational){1, 1000};
@@ -3535,6 +3513,9 @@ static OutputStream *new_output_stream(OptionsContext *o, AVFormatContext *oc, e
         st->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
 
     av_opt_get_int(sws_opts, "sws_flags", 0, &ost->sws_flags);
+
+    ost->pix_fmts[0] = ost->pix_fmts[1] = PIX_FMT_NONE;
+
     return ost;
 }
avplay.c (3 changed lines)
@@ -1701,9 +1701,10 @@ static AVFilter input_filter =
 
 static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const char *vfilters)
 {
+    static const enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
     char sws_flags_str[128];
     int ret;
-    SinkContext sink_ctx = { .pix_fmt = PIX_FMT_YUV420P };
+    SinkContext sink_ctx = { .pix_fmts = pix_fmts };
     AVFilterContext *filt_src = NULL, *filt_out = NULL;
     snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags);
     graph->scale_sws_opts = av_strdup(sws_flags_str);
cmdutils.c
@@ -1036,9 +1036,11 @@ static void null_end_frame(AVFilterLink *inlink) { }
 static int sink_query_formats(AVFilterContext *ctx)
 {
     SinkContext *priv = ctx->priv;
-    enum PixelFormat pix_fmts[] = { priv->pix_fmt, PIX_FMT_NONE };
 
-    avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
+    if (priv->pix_fmts)
+        avfilter_set_common_formats(ctx, avfilter_make_format_list(priv->pix_fmts));
+    else
+        avfilter_default_query_formats(ctx);
     return 0;
 }
cmdutils.h
@@ -368,7 +368,7 @@ FILE *get_preset_file(char *filename, size_t filename_size,
                       const char *preset_name, int is_path, const char *codec_name);
 
 typedef struct {
-    enum PixelFormat pix_fmt;
+    const enum PixelFormat *pix_fmts;
 } SinkContext;
 
 extern AVFilter sink;
xwd image test reference
@@ -1,3 +1,3 @@
-b838561f7df803ea14dd6307a9d3c5ec *./tests/data/images/xwd/02.xwd
-./tests/data/images/xwd/%02d.xwd CRC=0x69b329cd
-405615 ./tests/data/images/xwd/02.xwd
+50baa5560b7d1aa3188b19c1162bf7dc *./tests/data/images/xwd/02.xwd
+./tests/data/images/xwd/%02d.xwd CRC=0x6da01946
+304239 ./tests/data/images/xwd/02.xwd