
Merge commit '3b266da3d35f3f7a61258b78384dfe920d875d29'

* commit '3b266da3d35f3f7a61258b78384dfe920d875d29':
  avconv: add support for complex filtergraphs.
  avconv: make filtergraphs global.
  avconv: move filtered_frame from InputStream to OutputStream.
  avconv: don't set output width/height directly from input value.
  avconv: move resample_{width,height,pix_fmt} to InputStream.
  avconv: remove a useless variable from OutputStream.
  avconv: get output pixel format from lavfi.
  graphparser: fix the order in which unlabeled input links are returned.
  avconv: change {input,output}_{streams,files} into arrays of pointers.
  avconv: don't pass input/output streams to some functions.

Conflicts:
	cmdutils.c
	cmdutils.h
	doc/ffmpeg.texi
	ffmpeg.c
	ffplay.c

Merged-by: Michael Niedermayer <michaelni@gmx.at>
Michael Niedermayer 2012-04-17 04:01:17 +02:00
commit 4778783160
11 changed files with 923 additions and 414 deletions


@@ -370,6 +370,7 @@ int cmdutils_read_file(const char *filename, char **bufptr, size_t *size);
FILE *get_preset_file(char *filename, size_t filename_size,
const char *preset_name, int is_path, const char *codec_name);
/**
* Do all the necessary cleanup and abort.
* This function is implemented in the avtools, not cmdutils.
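A minimal sketch of how an individual avtool might provide this cleanup-and-abort hook; the name exit_program and its signature are assumptions for illustration, since the declaration itself falls outside this excerpt:

#include <stdlib.h>

/* Hypothetical per-tool definition: cmdutils.h only declares the hook,
 * while each avtool (ffmpeg, ffplay, ...) defines it and releases its
 * own state before aborting. Names and contents are illustrative. */
void exit_program(int ret)
{
    /* free tool-specific resources here: open files, codec contexts,
     * filter graphs, option arrays */
    exit(ret);
}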


@@ -224,6 +224,9 @@ codec-dependent.
@var{filter_graph} is a description of the filter graph to apply to
the stream. Use @code{-filters} to show all the available filters
(including also sources and sinks).
See also the @option{-filter_complex} option if you want to create filter graphs
with multiple inputs and/or outputs.
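As a quick illustration (the @code{scale} and @code{hflip} filters and their arguments are arbitrary choices here), a simple per-stream graph can be applied with:
@example
# illustrative filterchain; any single-input, single-output graph works here
ffmpeg -i INPUT -filter:v 'scale=640:-1,hflip' OUTPUT
@end example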
@item -pre[:@var{stream_specifier}] @var{preset_name} (@emph{output,per-stream})
Specify the preset for matching stream(s).
@@ -522,7 +525,7 @@ Synchronize read on input.
@section Advanced options
@table @option
@item -map [-]@var{input_file_id}[:@var{stream_specifier}][,@var{sync_file_id}[:@var{stream_specifier}]] (@emph{output})
@item -map [-]@var{input_file_id}[:@var{stream_specifier}][,@var{sync_file_id}[:@var{stream_specifier}]] | @var{[linklabel]} (@emph{output})
Designate one or more input streams as a source for the output file. Each input
stream is identified by the input file index @var{input_file_id} and
@@ -538,6 +541,10 @@ the source for output stream 1, etc.
A @code{-} character before the stream identifier creates a "negative" mapping.
It disables matching streams from already created mappings.
An alternative @var{[linklabel]} form will map outputs from complex filter
graphs (see the @option{-filter_complex} option) to the output file.
@var{linklabel} must correspond to a defined output link label in the graph.
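As an illustration (the graph and the @code{[outv]} label are arbitrary here), an output link defined by @option{-filter_complex} can be selected with:
@example
# [outv] is whatever label the complex graph assigns to its output
ffmpeg -i INPUT -filter_complex '[0:v]hflip[outv]' -map '[outv]' OUTPUT
@end example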
For example, to map ALL streams from the first input file to output
@example
ffmpeg -i INPUT -map 0 output
@@ -832,6 +839,44 @@ Specify Timecode for writing. @var{SEP} is ':' for non drop timecode and ';'
@example
ffmpeg -i input.mpg -timecode 01:02:03.04 -r 30000/1001 -s ntsc output.mpg
@end example
@item -filter_complex @var{filtergraph} (@emph{global})
Define a complex filter graph, i.e. one with an arbitrary number of inputs and/or
outputs. For simple graphs -- those with one input and one output of the same
type -- see the @option{-filter} options. @var{filtergraph} is a description of
the filter graph, as described in @ref{Filtergraph syntax}.
Input link labels must refer to input streams using the
@code{[file_index:stream_specifier]} syntax (i.e. the same as @option{-map}
uses). If @var{stream_specifier} matches multiple streams, the first one will be
used. An unlabeled input will be connected to the first unused input stream of
the matching type.
Output link labels are referred to with @option{-map}. Unlabeled outputs are
added to the first output file.
For example, to overlay an image over video
@example
ffmpeg -i video.mkv -i image.png -filter_complex '[0:v][1:v]overlay[out]' -map '[out]' out.mkv
@end example
Here @code{[0:v]} refers to the first video stream in the first input file,
which is linked to the first (main) input of the overlay filter. Similarly the
first video stream in the second input is linked to the second (overlay) input
of overlay.
Assuming there is only one video stream in each input file, we can omit input
labels, so the above is equivalent to
@example
ffmpeg -i video.mkv -i image.png -filter_complex 'overlay[out]' -map '[out]' out.mkv
@end example
Furthermore we can omit the output label and the single output from the filter
graph will be added to the output file automatically, so we can simply write
@example
ffmpeg -i video.mkv -i image.png -filter_complex 'overlay' out.mkv
@end example
@end table
@section Preset files


@@ -14,6 +14,7 @@ number of input and output pads of the filter.
A filter with no input pads is called a "source", a filter with no
output pads is called a "sink".
@anchor{Filtergraph syntax}
@section Filtergraph syntax
A filtergraph can be represented using a textual representation, which

ffmpeg.c (1214 changes)

File diff suppressed because it is too large.


@@ -1740,9 +1740,9 @@ static AVFilter input_filter =
static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const char *vfilters)
{
static const enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
char sws_flags_str[128];
int ret;
enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
AVBufferSinkParams *buffersink_params = av_buffersink_params_alloc();
AVFilterContext *filt_src = NULL, *filt_out = NULL;
snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags);


@@ -242,7 +242,7 @@ static int link_filter_inouts(AVFilterContext *filt_ctx,
} else {
p->filter_ctx = filt_ctx;
p->pad_idx = pad;
insert_inout(open_inputs, p);
append_inout(open_inputs, &p);
}
}
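The append_inout() helper this hunk switches to is not shown in the excerpt; below is a sketch consistent with the call site above (note the &p): it appends the new entry at the tail of the open-inputs list instead of inserting it at the head, so unlabeled input links come back in the order they appear in the graph description.

/* Sketch only; assumes libavfilter/avfilter.h for AVFilterInOut. */
static void append_inout(AVFilterInOut **inouts, AVFilterInOut **element)
{
    /* walk to the last element of the list, if any */
    while (*inouts && (*inouts)->next)
        inouts = &((*inouts)->next);

    if (!*inouts)
        *inouts = *element;          /* empty list: new element becomes the head */
    else
        (*inouts)->next = *element;  /* otherwise link it after the current tail */
    *element = NULL;                 /* ownership passes to the list */
}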


@@ -165,9 +165,9 @@ static av_cold int vsink_init(AVFilterContext *ctx, const char *args, void *opaq
av_unused AVBufferSinkParams *params;
if (!opaque) {
av_log(ctx, AV_LOG_ERROR,
av_log(ctx, AV_LOG_WARNING,
"No opaque field provided\n");
return AVERROR(EINVAL);
buf->pixel_fmts = NULL;
} else {
#if FF_API_OLD_VSINK_API
const int *pixel_fmts = (const enum PixelFormat *)opaque;
@@ -194,7 +194,11 @@ static int vsink_query_formats(AVFilterContext *ctx)
{
BufferSinkContext *buf = ctx->priv;
avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(buf->pixel_fmts));
if (buf->pixel_fmts)
avfilter_set_common_pixel_formats(ctx, avfilter_make_format_list(buf->pixel_fmts));
else
avfilter_default_query_formats(ctx);
return 0;
}
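To illustrate the behavioural change, here is a sketch of how a caller might now create the video buffer sink with and without an opaque parameter block. Instance names and the helper function are illustrative; the API calls are the era's public ones (av_buffersink_params_alloc() also appears in the ffplay hunk above).

#include "libavutil/error.h"
#include "libavutil/mem.h"
#include "libavutil/pixfmt.h"
#include "libavfilter/avfilter.h"
#include "libavfilter/avfiltergraph.h"
#include "libavfilter/buffersink.h"

/* Sketch only: "graph" is an already-allocated AVFilterGraph. */
static int make_video_sinks(AVFilterGraph *graph,
                            AVFilterContext **constrained,
                            AVFilterContext **unconstrained)
{
    static const enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
    AVBufferSinkParams *params = av_buffersink_params_alloc();
    int ret;

    if (!params)
        return AVERROR(ENOMEM);

    /* With an opaque parameter block, only the listed pixel formats are
     * negotiated, as before this change. */
    params->pixel_fmts = pix_fmts;
    ret = avfilter_graph_create_filter(constrained,
                                       avfilter_get_by_name("buffersink"),
                                       "out", NULL, params, graph);
    av_free(params);
    if (ret < 0)
        return ret;

    /* With no opaque, the sink now only logs a warning and accepts whatever
     * pixel format the rest of the graph negotiates (previously this was a
     * hard AVERROR(EINVAL)). */
    return avfilter_graph_create_filter(unconstrained,
                                        avfilter_get_by_name("buffersink"),
                                        "out_any", NULL, NULL, graph);
}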


@@ -73,9 +73,9 @@ do_video_encoding mpeg2threadivlc.mpg "-qscale 10 -vcodec mpeg2video -f mpeg1vid
do_video_decoding
# mpeg2 encoding interlaced
file=${outfile}mpeg2reuse.mpg
do_avconv $file $DEC_OPTS -me_threshold 256 -i ${target_path}/${outfile}mpeg2thread.mpg $ENC_OPTS -same_quant -me_threshold 256 -mb_threshold 1024 -vcodec mpeg2video -f mpeg1video -bf 2 -flags +ildct+ilme -threads 4
do_video_decoding
#file=${outfile}mpeg2reuse.mpg
#do_avconv $file $DEC_OPTS -me_threshold 256 -i ${target_path}/${outfile}mpeg2thread.mpg $ENC_OPTS -same_quant -me_threshold 256 -mb_threshold 1024 -vcodec mpeg2video -f mpeg1video -bf 2 -flags +ildct+ilme -threads 4
#do_video_decoding
fi
if [ -n "$do_msmpeg4v2" ] ; then


@@ -1,46 +0,0 @@
ret: 0 st: 0 flags:1 dts: 0.000000 pts: NOPTS pos: 0 size: 20829
ret: 0 st:-1 flags:0 ts:-1.000000
ret: 0 st: 0 flags:1 dts: 0.000000 pts: NOPTS pos: 0 size: 20829
ret: 0 st:-1 flags:1 ts: 1.894167
ret: 0 st: 0 flags:1 dts: 1.840000 pts: NOPTS pos: 337078 size: 26840
ret: 0 st: 0 flags:0 ts: 0.788334
ret: 0 st: 0 flags:1 dts: 0.880000 pts: NOPTS pos: 141401 size: 23537
ret:-1 st: 0 flags:1 ts:-0.317499
ret:-1 st:-1 flags:0 ts: 2.576668
ret: 0 st:-1 flags:1 ts: 1.470835
ret: 0 st: 0 flags:1 dts: 1.360000 pts: NOPTS pos: 232037 size: 26192
ret: 0 st: 0 flags:0 ts: 0.365002
ret: 0 st: 0 flags:1 dts: 0.400000 pts: NOPTS pos: 63793 size: 21295
ret:-1 st: 0 flags:1 ts:-0.740831
ret:-1 st:-1 flags:0 ts: 2.153336
ret: 0 st:-1 flags:1 ts: 1.047503
ret: 0 st: 0 flags:1 dts: 0.880000 pts: NOPTS pos: 141401 size: 23537
ret: 0 st: 0 flags:0 ts:-0.058330
ret: 0 st: 0 flags:1 dts: 0.000000 pts: NOPTS pos: 0 size: 20829
ret: 0 st: 0 flags:1 ts: 2.835837
ret: 0 st: 0 flags:1 dts: 1.840000 pts: NOPTS pos: 337078 size: 26840
ret: 0 st:-1 flags:0 ts: 1.730004
ret: 0 st: 0 flags:1 dts: 1.840000 pts: NOPTS pos: 337078 size: 26840
ret: 0 st:-1 flags:1 ts: 0.624171
ret: 0 st: 0 flags:1 dts: 0.400000 pts: NOPTS pos: 63793 size: 21295
ret: 0 st: 0 flags:0 ts:-0.481662
ret: 0 st: 0 flags:1 dts: 0.000000 pts: NOPTS pos: 0 size: 20829
ret: 0 st: 0 flags:1 ts: 2.412505
ret: 0 st: 0 flags:1 dts: 1.840000 pts: NOPTS pos: 337078 size: 26840
ret: 0 st:-1 flags:0 ts: 1.306672
ret: 0 st: 0 flags:1 dts: 1.360000 pts: NOPTS pos: 232037 size: 26192
ret: 0 st:-1 flags:1 ts: 0.200839
ret: 0 st: 0 flags:1 dts: 0.000000 pts: NOPTS pos: 0 size: 20829
ret: 0 st: 0 flags:0 ts:-0.904994
ret: 0 st: 0 flags:1 dts: 0.000000 pts: NOPTS pos: 0 size: 20829
ret: 0 st: 0 flags:1 ts: 1.989173
ret: 0 st: 0 flags:1 dts: 1.840000 pts: NOPTS pos: 337078 size: 26840
ret: 0 st:-1 flags:0 ts: 0.883340
ret: 0 st: 0 flags:1 dts: 1.360000 pts: NOPTS pos: 232037 size: 26192
ret:-1 st:-1 flags:1 ts:-0.222493
ret:-1 st: 0 flags:0 ts: 2.671674
ret: 0 st: 0 flags:1 ts: 1.565841
ret: 0 st: 0 flags:1 dts: 1.360000 pts: NOPTS pos: 232037 size: 26192
ret: 0 st:-1 flags:0 ts: 0.460008
ret: 0 st: 0 flags:1 dts: 0.880000 pts: NOPTS pos: 141401 size: 23537
ret:-1 st:-1 flags:1 ts:-0.645825


@@ -2,7 +2,3 @@
791773 ./tests/data/vsynth1/mpeg2threadivlc.mpg
d1658911ca83f5616c1d32abc40750de *./tests/data/mpeg2thread_ilace.vsynth1.out.yuv
stddev: 7.63 PSNR: 30.48 MAXDIFF: 110 bytes: 7603200/ 7603200
d119fe917dd81d1ff758b4ce684a8d9d *./tests/data/vsynth1/mpeg2reuse.mpg
2074636 ./tests/data/vsynth1/mpeg2reuse.mpg
92ced6afe8c02304943c400cce51a5f4 *./tests/data/mpeg2thread_ilace.vsynth1.out.yuv
stddev: 7.66 PSNR: 30.44 MAXDIFF: 111 bytes: 7603200/ 7603200


@@ -2,7 +2,3 @@
178801 ./tests/data/vsynth2/mpeg2threadivlc.mpg
8c6a7ed2eb73bd18fd2bb9829464100d *./tests/data/mpeg2thread_ilace.vsynth2.out.yuv
stddev: 4.72 PSNR: 34.65 MAXDIFF: 72 bytes: 7603200/ 7603200
864d6bf2982a61e510003a518be65a2d *./tests/data/vsynth2/mpeg2reuse.mpg
383419 ./tests/data/vsynth2/mpeg2reuse.mpg
bb20fa080cfd2b0a687ea7376ff4f902 *./tests/data/mpeg2thread_ilace.vsynth2.out.yuv
stddev: 4.73 PSNR: 34.63 MAXDIFF: 72 bytes: 7603200/ 7603200