
Merge remote-tracking branch 'qatar/master'

* qatar/master:
  isom: sort and pretty-print codec_movaudio_tags[]
  isom: remove pointless comments in codec_movaudio_tags[]
  isom: remove commented-out tag for vorbis
  movenc: write 'chan' tag for AC-3 in MOV
  mov: add support for reading and writing the 'chan' tag
  audioconvert: add some additional channel and channel layout macros
  audioconvert: change 7.1 "wide" layout to use side surround channels
  movenc: simplify handling of pcm vs. adpcm vs. other compressed codecs
  doc: update documentation to use avconv
  doc: update demuxers section
  doc: extend external library coverage
  doc: split platform specific information
  doc: port the git-howto to texinfo
  doc: provide fallback css and customize @float
  doc: document fate in a texinfo
  doxy: change hue value to match our green

Conflicts:
	doc/fate.txt
	doc/ffserver.texi
	doc/general.texi
	doc/muxers.texi
	doc/protocols.texi
	doc/t2h.init
	libavformat/isom.c
	libavformat/mov.c
	libavutil/avutil.h
	tests/ref/acodec/pcm_s16be
	tests/ref/acodec/pcm_s24be
	tests/ref/acodec/pcm_s32be
	tests/ref/acodec/pcm_s8
	tests/ref/lavf/mov

Merged-by: Michael Niedermayer <michaelni@gmx.at>
Committed by Michael Niedermayer on 2011-12-10 01:25:15 +01:00 in commit c59b80c8d3.
27 changed files with 1786 additions and 541 deletions


@ -820,7 +820,7 @@ HTML_STYLESHEET =
# 180 is cyan, 240 is blue, 300 purple, and 360 is red again.
# The allowed range is 0 to 359.
HTML_COLORSTYLE_HUE = 220
HTML_COLORSTYLE_HUE = 120
# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of
# the colors in the HTML output. For a value of 0 the output will use


@ -3,8 +3,11 @@ PODPAGES = $(PROGS-yes:%=doc/%.pod)
HTMLPAGES = $(PROGS-yes:%=doc/%.html) \
doc/developer.html \
doc/faq.html \
doc/fate.html \
doc/general.html \
doc/git-howto.html \
doc/libavfilter.html \
doc/platform.html \
DOCS = $(HTMLPAGES) $(MANPAGES) $(PODPAGES)


@ -132,8 +132,8 @@ libavcodec libraries. To see the list of available AVOptions, use the
@option{-help} option. They are separated into two categories:
@table @option
@item generic
These options can be set for any container, codec or device. Generic options are
listed under AVFormatContext options for containers/devices and under
These options can be set for any container, codec or device. Generic options
are listed under AVFormatContext options for containers/devices and under
AVCodecContext options for codecs.
@item private
These options are specific to the given container, device or codec. Private
@ -150,8 +150,8 @@ ffmpeg -i input.flac -id3v2_version 3 out.mp3
All codec AVOptions are obviously per-stream, so the chapter on stream
specifiers applies to them
Note -nooption syntax cannot be used for boolean AVOptions, use -option
0/-option 1.
Note @option{-nooption} syntax cannot be used for boolean AVOptions,
use @option{-option 0}/@option{-option 1}.
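For instance, with a hypothetical boolean AVOption named @option{foo} (not a
real option, just an illustration), the two accepted spellings would be:
@example
ffmpeg -i input.avi -foo 0 output.avi
ffmpeg -i input.avi -foo 1 output.avi
@end example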
Note2 old undocumented way of specifying per-stream AVOptions by prepending
v/a/s to the options name is now obsolete and will be removed soon.


@ -34,7 +34,7 @@ JPEG image. The individual frames can be extracted without loss,
e.g. by
@example
ffmpeg -i ../some_mjpeg.avi -vcodec copy frames_%d.jpg
ffmpeg -i ../some_mjpeg.avi -c:v copy frames_%d.jpg
@end example
Unfortunately, these chunks are incomplete JPEG images, because
@ -57,9 +57,9 @@ stream (carrying the AVI1 header ID and lacking a DHT segment) to
produce fully qualified JPEG images.
@example
ffmpeg -i mjpeg-movie.avi -vcodec copy -vbsf mjpeg2jpeg frame_%d.jpg
ffmpeg -i mjpeg-movie.avi -c:v copy -vbsf mjpeg2jpeg frame_%d.jpg
exiftran -i -9 frame*.jpg
ffmpeg -i frame_%d.jpg -vcodec copy rotated.avi
ffmpeg -i frame_%d.jpg -c:v copy rotated.avi
@end example
@section mjpega_dump_header


@ -49,19 +49,19 @@ sequence of filenames of the form @file{i%m%g-1.jpg},
The size, the pixel format, and the format of each image must be the
same for all the files in the sequence.
The following example shows how to use @file{ffmpeg} for creating a
The following example shows how to use @command{ffmpeg} for creating a
video from the images in the file sequence @file{img-001.jpeg},
@file{img-002.jpeg}, ..., assuming an input frame rate of 10 frames per
second:
@example
ffmpeg -r 10 -f image2 -i 'img-%03d.jpeg' out.avi
ffmpeg -i 'img-%03d.jpeg' -r 10 out.mkv
@end example
Note that the pattern must not necessarily contain "%d" or
"%0@var{N}d", for example to convert a single image file
@file{img.jpeg} you can employ the command:
@example
ffmpeg -f image2 -i img.jpeg img.png
ffmpeg -i img.jpeg img.png
@end example
@section applehttp

doc/fate.texi (new file, 135 lines)

@ -0,0 +1,135 @@
\input texinfo @c -*- texinfo -*-
@settitle FATE Automated Testing Environment
@titlepage
@center @titlefont{FATE Automated Testing Environment}
@end titlepage
@top
@contents
@chapter Introduction
FATE provides a regression testsuite embedded within the FFmpeg build system.
It can be run locally and optionally configured to send reports to a web
aggregator and viewer @url{http://fate.ffmpeg.org}.
It is advised to run FATE before submitting patches to the current codebase
and to provide new tests when submitting patches that add additional features.
@chapter Running FATE
@section Samples and References
In order to run, FATE needs a large amount of data (samples and references)
that is provided separately from the actual source distribution.
To inform the build system about the testsuite location, pass
@option{--samples=<path to the samples>} to @command{configure} or set the
@var{SAMPLES} Make variable or the @var{FATE_SAMPLES} environment variable
to a suitable value.
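For instance, assuming the samples have been placed in a directory named
@file{fate-suite} (as in the rsync examples below), any of the following
should be equivalent:
@example
./configure --samples=fate-suite/
make fate SAMPLES=fate-suite/
FATE_SAMPLES=fate-suite/ make fate
@end example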
The dataset is available through @command{rsync}; it is possible to fetch
the current samples either with a plain rsync command or through a specific
@ref{Makefile target}.
@example
# rsync -aL rsync://fate.ffmpeg.org/fate-suite/ fate-suite
@end example
@example
# make fate-rsync SAMPLES=fate-suite
@end example
@chapter Manual Run
The FATE regression tests can be run through @command{make}.
Specific Makefile targets and Makefile variables are available:
@anchor{Makefile target}
@section FATE Makefile targets
@table @option
@item fate-list
List all fate/regression test targets.
@item fate-rsync
Shortcut to download the fate test samples to the specified testsuite location.
@item fate
Run the FATE test suite (requires the fate-suite dataset).
@end table
@section FATE Makefile variables
@table @option
@item V
Verbosity level, can be set to 0, 1 or 2.
@table @option
@item 0
show just the test arguments
@item 1
show just the command used in the test
@item 2
show everything
@end table
@item SAMPLES
Specify or override the path to the FATE samples at make time; it is only
meaningful while running the regression tests.
@item THREADS
Specify how many threads to use while running the regression tests; this is
quite useful for detecting thread-related regressions.
@end table
@example
make V=1 SAMPLES=/var/fate/samples THREADS=2 fate
@end example
@chapter Automated Tests
In order to automatically test specific configurations, e.g. multiple
compilers, @command{tests/fate.sh} is provided.
This shell script builds FFmpeg, runs the regression tests and prepares a
report that can be sent to @url{fate.ffmpeg.org} or directly examined locally.
@section Testing Profiles
The configuration file passed to @command{fate.sh} is a shell script as well.
It must provide at least a @var{slot} identifier, the @var{repo} from
which to fetch the sources, the @var{samples} directory, and a @var{workdir}
with enough space to build and run all the tests.
An optional submit command @var{fate_recv} and a @var{comment} describing
the testing profile are also available.
Additional optional parameters to tune the FFmpeg build and reporting
process can be passed.
@example
slot= # some unique identifier
repo=git://source.ffmpeg.org/ffmpeg.git # the source repository
samples=/path/to/fate/samples
workdir= # directory in which to do all the work
fate_recv="ssh -T fate@fate.ffmpeg.org" # command to submit report
comment= # optional description
# the following are optional and map to configure options
arch=
cpu=
cross_prefix=
cc=
target_os=
sysroot=
target_exec=
target_path=
extra_cflags=
extra_ldflags=
extra_libs=
extra_conf= # extra configure options not covered above
#make= # name of GNU make if not 'make'
makeopts= # extra options passed to 'make'
#tar= # command to create a tar archive from its arguments on
# stdout, defaults to 'tar c'
@end example
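Assuming the profile above has been saved as, say, @file{fate_config.sh}
(the file name is arbitrary), the script is then typically invoked with that
configuration file as its argument:
@example
./tests/fate.sh fate_config.sh
@end example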
@section Submitting Reports
In order to send reports you need to create an @command{ssh} key and send it
to the fate server administrator.
The current server fingerprint is @var{b1:31:c8:79:3f:04:1d:f8:f2:23:26:5a:fd:55:fa:92}
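As a minimal sketch (the key type and file name below are arbitrary choices,
not FATE requirements), a standard OpenSSH key pair can be generated with:
@example
ssh-keygen -t rsa -f ~/.ssh/fate_key
@end example
The public half (@file{fate_key.pub} in this case) is what you send to the
server administrator.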


@ -34,7 +34,7 @@ file.
This documentation covers only the streaming aspects of ffserver /
ffmpeg. All questions about parameters for ffmpeg, codec questions,
etc. are not covered here. Read @file{ffmpeg-doc.html} for more
etc. are not covered here. Read @file{ffmpeg.html} for more
information.
@section How does it work?
@ -265,7 +265,7 @@ rather than as a daemon.
@c man begin SEEALSO
ffmpeg(1), ffplay(1), ffprobe(1), the @file{ffmpeg/doc/ffserver.conf}
ffmpeg(1), ffplay(1), ffprobe(1), the @file{ffserver.conf}
example and the FFmpeg HTML documentation
@c man end


@ -9,7 +9,7 @@
@contents
@chapter external libraries
@chapter External libraries
FFmpeg can be hooked up with a number of external libraries to add support
for more formats. None of them are used by default, their use has to be
@ -23,20 +23,75 @@ instructions. To enable using OpenJPEG in FFmpeg, pass @code{--enable-libopenjp
@file{./configure}.
@section OpenCORE AMR
@section OpenCORE and VisualOn libraries
Spun off from the Google Android sources, the OpenCORE and VisualOn libraries
provide encoders for a number of audio codecs.
@float NOTE
The OpenCORE and VisualOn libraries are under the Apache License 2.0
(see @url{http://www.apache.org/licenses/LICENSE-2.0} for details), which is
incompatible with the LGPL version 2.1 and GPL version 2. You have to
upgrade FFmpeg's license to LGPL version 3 (or, if you have enabled
GPL components, GPL version 3) to use them.
@end float
@subsection OpenCORE AMR
FFmpeg can make use of the OpenCORE libraries for AMR-NB
decoding/encoding and AMR-WB decoding.
Go to @url{http://sourceforge.net/projects/opencore-amr/} and follow the instructions for
installing the libraries. Then pass @code{--enable-libopencore-amrnb} and/or
@code{--enable-libopencore-amrwb} to configure to enable the libraries.
Go to @url{http://sourceforge.net/projects/opencore-amr/} and follow the
instructions for installing the libraries.
Then pass @code{--enable-libopencore-amrnb} and/or
@code{--enable-libopencore-amrwb} to configure to enable them.
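As a concrete sketch, a configure invocation enabling both libraries might
look like the following; @code{--enable-version3} is included here only
because of the licensing note above, and the rest should be adjusted to your
setup:
@example
./configure --enable-version3 --enable-libopencore-amrnb --enable-libopencore-amrwb
@end example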
@subsection VisualOn AAC encoder library
FFmpeg can make use of the VisualOn AACenc library for AAC encoding.
Go to @url{http://sourceforge.net/projects/opencore-amr/} and follow the
instructions for installing the library.
Then pass @code{--enable-libvo-aacenc} to configure to enable it.
@subsection VisualOn AMR-WB encoder library
FFmpeg can make use of the VisualOn AMR-WBenc library for AMR-WB encoding.
Go to @url{http://sourceforge.net/projects/opencore-amr/} and follow the
instructions for installing the library.
Then pass @code{--enable-libvo-amrwbenc} to configure to enable it.
@section LAME
FFmpeg can make use of the LAME library for MP3 encoding.
Go to @url{http://lame.sourceforge.net/} and follow the
instructions for installing the library.
Then pass @code{--enable-libmp3lame} to configure to enable it.
@section libvpx
FFmpeg can make use of the libvpx library for VP8 encoding.
Go to @url{http://www.webmproject.org/} and follow the instructions for
installing the library. Then pass @code{--enable-libvpx} to configure to
enable it.
@section x264
FFmpeg can make use of the x264 library for H.264 encoding.
Go to @url{http://www.videolan.org/developers/x264.html} and follow the
instructions for installing the library. Then pass @code{--enable-libx264} to
configure to enable it.
@float NOTE
x264 is under the GNU General Public License version 2 or later
(see @url{http://www.gnu.org/licenses/old-licenses/gpl-2.0.html} for
details), so you must upgrade FFmpeg's license to GPL in order to use it.
@end float
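For example, a GPL-enabled build with x264 support could be configured along
these lines (a sketch, not the only valid combination of options):
@example
./configure --enable-gpl --enable-libx264
@end example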
Note that OpenCORE is under the Apache License 2.0 (see
@url{http://www.apache.org/licenses/LICENSE-2.0} for details), which is
incompatible with the LGPL version 2.1 and GPL version 2. You have to
upgrade FFmpeg's license to LGPL version 3 (or if you have enabled
GPL components, GPL version 3) to use it.
@chapter Supported File Formats and Codecs
@ -797,378 +852,4 @@ performance on systems without hardware floating point support).
@code{X} means that input/output is supported.
@chapter Platform Specific information
@section DOS
Using a cross-compiler is preferred for various reasons.
@section OS/2
For information about compiling FFmpeg on OS/2 see
@url{http://www.edm2.com/index.php/FFmpeg}.
@section Unix-like
Some parts of FFmpeg cannot be built with version 2.15 of the GNU
assembler which is still provided by a few AMD64 distributions. To
make sure your compiler really uses the required version of gas
after a binutils upgrade, run:
@example
$(gcc -print-prog-name=as) --version
@end example
If not, then you should install a different compiler that has no
hard-coded path to gas. In the worst case pass @code{--disable-asm}
to configure.
@subsection BSD
BSD make will not build FFmpeg, you need to install and use GNU Make
(@file{gmake}).
@subsection (Open)Solaris
GNU Make is required to build FFmpeg, so you have to invoke (@file{gmake}),
standard Solaris Make will not work. When building with a non-c99 front-end
(gcc, generic suncc) add either @code{--extra-libs=/usr/lib/values-xpg6.o}
or @code{--extra-libs=/usr/lib/64/values-xpg6.o} to the configure options
since the libc is not c99-compliant by default. The probes performed by
configure may raise an exception leading to the death of configure itself
due to a bug in the system shell. Simply invoke a different shell such as
bash directly to work around this:
@example
bash ./configure
@end example
@anchor{Darwin}
@subsection Darwin (OSX, iPhone)
The toolchain provided with Xcode is sufficient to build the basic
unacelerated code.
OSX on PowerPC or ARM (iPhone) requires a preprocessor from
@url{http://github.com/yuvi/gas-preprocessor} to build the optimized
assembler functions. Just download the Perl script and put it somewhere
in your PATH, FFmpeg's configure will pick it up automatically.
OSX on amd64 and x86 requires @command{yasm} to build most of the
optimized assembler functions @url{http://mxcl.github.com/homebrew/, Homebrew},
@url{http://www.gentoo.org/proj/en/gentoo-alt/prefix/bootstrap-macos.xml, Gentoo Prefix}
or @url{http://www.macports.org, MacPorts} can easily provide it.
@section Windows
To get help and instructions for building FFmpeg under Windows, check out
the FFmpeg Windows Help Forum at
@url{http://ffmpeg.arrozcru.org/}.
@subsection Native Windows compilation
FFmpeg can be built to run natively on Windows using the MinGW tools. Install
the latest versions of MSYS and MinGW from @url{http://www.mingw.org/}.
You can find detailed installation instructions in the download
section and the FAQ.
FFmpeg does not build out-of-the-box with the packages the automated MinGW
installer provides. It also requires coreutils to be installed and many other
packages updated to the latest version. The minimum version for some packages
are listed below:
@itemize
@item bash 3.1
@item msys-make 3.81-2 (note: not mingw32-make)
@item w32api 3.13
@item mingw-runtime 3.15
@end itemize
FFmpeg automatically passes @code{-fno-common} to the compiler to work around
a GCC bug (see @url{http://gcc.gnu.org/bugzilla/show_bug.cgi?id=37216}).
Notes:
@itemize
@item Building natively using MSYS can be sped up by disabling implicit rules
in the Makefile by calling @code{make -r} instead of plain @code{make}. This
speed up is close to non-existent for normal one-off builds and is only
noticeable when running make for a second time (for example in
@code{make install}).
@item In order to compile FFplay, you must have the MinGW development library
of @uref{http://www.libsdl.org/, SDL}.
Edit the @file{bin/sdl-config} script so that it points to the correct prefix
where SDL was installed. Verify that @file{sdl-config} can be launched from
the MSYS command line.
@item By using @code{./configure --enable-shared} when configuring FFmpeg,
you can build the FFmpeg libraries (e.g. libavutil, libavcodec,
libavformat) as DLLs.
@end itemize
@subsection Microsoft Visual C++ compatibility
As stated in the FAQ, FFmpeg will not compile under MSVC++. However, if you
want to use the libav* libraries in your own applications, you can still
compile those applications using MSVC++. But the libav* libraries you link
to @emph{must} be built with MinGW. However, you will not be able to debug
inside the libav* libraries, since MSVC++ does not recognize the debug
symbols generated by GCC.
We strongly recommend you to move over from MSVC++ to MinGW tools.
This description of how to use the FFmpeg libraries with MSVC++ is based on
Microsoft Visual C++ 2005 Express Edition. If you have a different version,
you might have to modify the procedures slightly.
@subsubsection Using static libraries
Assuming you have just built and installed FFmpeg in @file{/usr/local}.
@enumerate
@item Create a new console application ("File / New / Project") and then
select "Win32 Console Application". On the appropriate page of the
Application Wizard, uncheck the "Precompiled headers" option.
@item Write the source code for your application, or, for testing, just
copy the code from an existing sample application into the source file
that MSVC++ has already created for you. For example, you can copy
@file{libavformat/output-example.c} from the FFmpeg distribution.
@item Open the "Project / Properties" dialog box. In the "Configuration"
combo box, select "All Configurations" so that the changes you make will
affect both debug and release builds. In the tree view on the left hand
side, select "C/C++ / General", then edit the "Additional Include
Directories" setting to contain the path where the FFmpeg includes were
installed (i.e. @file{c:\msys\1.0\local\include}).
Do not add MinGW's include directory here, or the include files will
conflict with MSVC's.
@item Still in the "Project / Properties" dialog box, select
"Linker / General" from the tree view and edit the
"Additional Library Directories" setting to contain the @file{lib}
directory where FFmpeg was installed (i.e. @file{c:\msys\1.0\local\lib}),
the directory where MinGW libs are installed (i.e. @file{c:\mingw\lib}),
and the directory where MinGW's GCC libs are installed
(i.e. @file{C:\mingw\lib\gcc\mingw32\4.2.1-sjlj}). Then select
"Linker / Input" from the tree view, and add the files @file{libavformat.a},
@file{libavcodec.a}, @file{libavutil.a}, @file{libmingwex.a},
@file{libgcc.a}, and any other libraries you used (i.e. @file{libz.a})
to the end of "Additional Dependencies".
@item Now, select "C/C++ / Code Generation" from the tree view. Select
"Debug" in the "Configuration" combo box. Make sure that "Runtime
Library" is set to "Multi-threaded Debug DLL". Then, select "Release" in
the "Configuration" combo box and make sure that "Runtime Library" is
set to "Multi-threaded DLL".
@item Click "OK" to close the "Project / Properties" dialog box.
@item MSVC++ lacks some C99 header files that are fundamental for FFmpeg.
Get msinttypes from @url{http://code.google.com/p/msinttypes/downloads/list}
and install it in MSVC++'s include directory
(i.e. @file{C:\Program Files\Microsoft Visual Studio 8\VC\include}).
@item MSVC++ also does not understand the @code{inline} keyword used by
FFmpeg, so you must add this line before @code{#include}ing libav*:
@example
#define inline _inline
@end example
@item Build your application, everything should work.
@end enumerate
@subsubsection Using shared libraries
This is how to create DLL and LIB files that are compatible with MSVC++:
@enumerate
@item Add a call to @file{vcvars32.bat} (which sets up the environment
variables for the Visual C++ tools) as the first line of @file{msys.bat}.
The standard location for @file{vcvars32.bat} is
@file{C:\Program Files\Microsoft Visual Studio 8\VC\bin\vcvars32.bat},
and the standard location for @file{msys.bat} is @file{C:\msys\1.0\msys.bat}.
If this corresponds to your setup, add the following line as the first line
of @file{msys.bat}:
@example
call "C:\Program Files\Microsoft Visual Studio 8\VC\bin\vcvars32.bat"
@end example
Alternatively, you may start the @file{Visual Studio 2005 Command Prompt},
and run @file{c:\msys\1.0\msys.bat} from there.
@item Within the MSYS shell, run @code{lib.exe}. If you get a help message
from @file{Microsoft (R) Library Manager}, this means your environment
variables are set up correctly, the @file{Microsoft (R) Library Manager}
is on the path and will be used by FFmpeg to create
MSVC++-compatible import libraries.
@item Build FFmpeg with
@example
./configure --enable-shared
make
make install
@end example
Your install path (@file{/usr/local/} by default) should now have the
necessary DLL and LIB files under the @file{bin} directory.
Alternatively, build the libraries with a cross compiler, according to
the instructions below in @ref{Cross compilation for Windows with Linux}.
To use those files with MSVC++, do the same as you would do with
the static libraries, as described above. But in Step 4,
you should only need to add the directory where the LIB files are installed
(i.e. @file{c:\msys\usr\local\bin}). This is not a typo, the LIB files are
installed in the @file{bin} directory. And instead of adding the static
libraries (@file{libxxx.a} files) you should add the MSVC import libraries
(@file{avcodec.lib}, @file{avformat.lib}, and
@file{avutil.lib}). Note that you should not use the GCC import
libraries (@file{libxxx.dll.a} files), as these will give you undefined
reference errors. There should be no need for @file{libmingwex.a},
@file{libgcc.a}, and @file{wsock32.lib}, nor any other external library
statically linked into the DLLs.
FFmpeg headers do not declare global data for Windows DLLs through the usual
dllexport/dllimport interface. Such data will be exported properly while
building, but to use them in your MSVC++ code you will have to edit the
appropriate headers and mark the data as dllimport. For example, in
libavutil/pixdesc.h you should have:
@example
extern __declspec(dllimport) const AVPixFmtDescriptor av_pix_fmt_descriptors[];
@end example
Note that using import libraries created by dlltool requires
the linker optimization option to be set to
"References: Keep Unreferenced Data (@code{/OPT:NOREF})", otherwise
the resulting binaries will fail during runtime. This isn't
required when using import libraries generated by lib.exe.
This issue is reported upstream at
@url{http://sourceware.org/bugzilla/show_bug.cgi?id=12633}.
To create import libraries that work with the @code{/OPT:REF} option
(which is enabled by default in Release mode), follow these steps:
@enumerate
@item Open @file{Visual Studio 2005 Command Prompt}.
Alternatively, in a normal command line prompt, call @file{vcvars32.bat}
which sets up the environment variables for the Visual C++ tools
(the standard location for this file is
@file{C:\Program Files\Microsoft Visual Studio 8\VC\bin\vcvars32.bat}).
@item Enter the @file{bin} directory where the created LIB and DLL files
are stored.
@item Generate new import libraries with @file{lib.exe}:
@example
lib /machine:i386 /def:..\lib\avcodec-53.def /out:avcodec.lib
lib /machine:i386 /def:..\lib\avdevice-53.def /out:avdevice.lib
lib /machine:i386 /def:..\lib\avfilter-2.def /out:avfilter.lib
lib /machine:i386 /def:..\lib\avformat-53.def /out:avformat.lib
lib /machine:i386 /def:..\lib\avutil-51.def /out:avutil.lib
lib /machine:i386 /def:..\lib\swscale-2.def /out:swscale.lib
@end example
@end enumerate
@anchor{Cross compilation for Windows with Linux}
@subsection Cross compilation for Windows with Linux
You must use the MinGW cross compilation tools available at
@url{http://www.mingw.org/}.
Then configure FFmpeg with the following options:
@example
./configure --target-os=mingw32 --cross-prefix=i386-mingw32msvc-
@end example
(you can change the cross-prefix according to the prefix chosen for the
MinGW tools).
Then you can easily test FFmpeg with @uref{http://www.winehq.com/, Wine}.
@subsection Compilation under Cygwin
Please use Cygwin 1.7.x as the obsolete 1.5.x Cygwin versions lack
llrint() in its C library.
Install your Cygwin with all the "Base" packages, plus the
following "Devel" ones:
@example
binutils, gcc4-core, make, git, mingw-runtime, texi2html
@end example
And the following "Utils" one:
@example
diffutils
@end example
Then run
@example
./configure
@end example
to make a static build.
The current @code{gcc4-core} package is buggy and needs this flag to build
shared libraries:
@example
./configure --enable-shared --disable-static --extra-cflags=-fno-reorder-functions
@end example
If you want to build FFmpeg with additional libraries, download Cygwin
"Devel" packages for Ogg and Vorbis from any Cygwin packages repository:
@example
libogg-devel, libvorbis-devel
@end example
These library packages are only available from
@uref{http://sourceware.org/cygwinports/, Cygwin Ports}:
@example
yasm, libSDL-devel, libdirac-devel, libfaac-devel, libaacplus-devel, libgsm-devel,
libmp3lame-devel, libschroedinger1.0-devel, speex-devel, libtheora-devel,
libxvidcore-devel
@end example
The recommendation for libnut and x264 is to build them from source by
yourself, as they evolve too quickly for Cygwin Ports to be up to date.
Cygwin 1.7.x has IPv6 support. You can add IPv6 to Cygwin 1.5.x by means
of the @code{libgetaddrinfo-devel} package, available at Cygwin Ports.
@subsection Cross compilation for Windows under Cygwin
With Cygwin you can create Windows binaries that do not need the cygwin1.dll.
Just install your Cygwin as explained before, plus these additional
"Devel" packages:
@example
gcc-mingw-core, mingw-runtime, mingw-zlib
@end example
and add some special flags to your configure invocation.
For a static build run
@example
./configure --target-os=mingw32 --extra-cflags=-mno-cygwin --extra-libs=-mno-cygwin
@end example
and for a build with shared libraries
@example
./configure --target-os=mingw32 --enable-shared --disable-static --extra-cflags=-mno-cygwin --extra-libs=-mno-cygwin
@end example
@bye

doc/git-howto.texi (new file, 344 lines)

@ -0,0 +1,344 @@
\input texinfo @c -*- texinfo -*-
@settitle Using git to develop FFmpeg
@titlepage
@center @titlefont{Using git to develop FFmpeg}
@end titlepage
@top
@contents
@chapter Introduction
This document aims to give a quick reference to a set of useful git
commands. You should always consult the extensive and detailed documentation
provided directly by git:
@example
git --help
man git
@end example
shows you the available subcommands,
@example
git <command> --help
man git-<command>
@end example
shows information about the subcommand <command>.
Additional information can be found on the
@url{http://gitref.org, Git Reference} website.
For more information about the Git project, visit the
@url{http://git-scm.com/, Git website}.
Consult these resources whenever you have problems; they are quite exhaustive.
What follows is a basic introduction to Git and some FFmpeg-specific
guidelines to ease contributing to the project.
@chapter Basic Usage
@section Get Git
You can get git from @url{http://git-scm.com/}.
Most distributions and operating systems provide a package for it.
@section Cloning the source tree
@example
git clone git://source.ffmpeg.org/ffmpeg <target>
@end example
This will put the FFmpeg sources into the directory @var{<target>}.
@example
git clone git@@source.ffmpeg.org:ffmpeg <target>
@end example
This will put the FFmpeg sources into the directory @var{<target>} and let
you push back your changes to the remote repository.
@section Updating the source tree to the latest revision
@example
git pull (--rebase)
@end example
pulls in the latest changes from the tracked branch. The tracked branch
can be remote. By default the master branch tracks the branch master in
the remote origin.
@float IMPORTANT
@command{--rebase} (see below) is recommended.
@end float
@section Rebasing your local branches
@example
git pull --rebase
@end example
fetches the changes from the main repository and replays your local commits
over it. This is required to keep all your local changes at the top of
FFmpeg's master tree. The master tree will reject pushes with merge commits.
@section Adding/removing files/directories
@example
git add [-A] <filename/dirname>
git rm [-r] <filename/dirname>
@end example
Git needs to be notified of all changes you make to your working
directory that make files appear or disappear.
Line moves across files are automatically tracked.
@section Showing modifications
@example
git diff <filename(s)>
@end example
will show all local modifications in your working directory as a unified diff.
@section Inspecting the changelog
@example
git log <filename(s)>
@end example
You may also use graphical tools like gitview or gitk, or the web
interface available at @url{http://source.ffmpeg.org/}.
@section Checking source tree status
@example
git status
@end example
detects all the changes you made and lists what actions will be taken in case
of a commit (additions, modifications, deletions, etc.).
@section Committing
@example
git diff --check
@end example
to double check your changes before committing them to avoid trouble later
on. All experienced developers do this on each and every commit, no matter
how small.
Every one of them has been saved from looking like a fool by this many times.
It's very easy for stray debug output or cosmetic modifications to slip in;
please avoid such problems through this extra level of scrutiny.
For cosmetics-only commits you should get (almost) empty output from
@example
git diff -w -b <filename(s)>
@end example
Also check the output of
@example
git status
@end example
to make sure you don't have untracked files or deletions.
@example
git add [-i|-p|-A] <filenames/dirnames>
@end example
Make sure you have told git your name and email address
@example
git config --global user.name "My Name"
git config --global user.email my@@email.invalid
@end example
Use @var{--global} to set the global configuration for all your git checkouts.
Git will select the changes to the files for commit. Optionally you can use
the interactive or the patch mode to select hunk by hunk what should be
added to the commit.
@example
git commit
@end example
Git will commit the selected changes to your current local branch.
You will be prompted for a log message in an editor, which is either
set in your personal configuration file through
@example
git config --global core.editor
@end example
or set by one of the following environment variables:
@var{GIT_EDITOR}, @var{VISUAL} or @var{EDITOR}.
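For example, to pick a specific editor (vim is just an arbitrary choice here):
@example
git config --global core.editor vim
@end example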
Log messages should be concise but descriptive. Explain why you made a change;
what you did will be obvious from the changes themselves most of the time.
Saying just "bug fix" or "10l" is bad. Remember that people of varying skill
levels read your code and educate themselves from it. Don't include filenames
in log messages; Git provides that information.
Preferably, give the commit message a terse, descriptive first line, an
empty line and then a full description. The first line will be used by
git format-patch to name the patch.
@section Preparing a patchset
@example
git format-patch <commit> [-o directory]
@end example
will generate a set of patches for each commit between @var{<commit>} and
current @var{HEAD}. E.g.
@example
git format-patch origin/master
@end example
will generate patches for all commits on the current branch which are not
present upstream.
A useful shortcut is also
@example
git format-patch -n
@end example
which will generate patches from the last @var{n} commits.
By default the patches are created in the current directory.
@section Sending patches for review
@example
git send-email <commit list|directory>
@end example
will send the patches created by @command{git format-patch} or directly
generate them. All the email fields can be configured in the global/local
configuration or overridden on the command line.
Note that this tool must often be installed separately (e.g. @var{git-email}
package on Debian-based distros).
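As an illustration, assuming @command{git format-patch} wrote the patches into
a directory named @file{outgoing/} (a hypothetical name), they could be sent
to the development mailing list with:
@example
git send-email --to ffmpeg-devel@@ffmpeg.org outgoing/
@end example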
@section Renaming/moving/copying files or contents of files
Git automatically tracks such changes, making those normal commits.
@example
mv/cp path/file otherpath/otherfile
git add [-A] .
git commit
@end example
@chapter FFmpeg specific
@section Reverting broken commits
@example
git reset <commit>
@end example
@command{git reset} will uncommit the changes up to @var{<commit>}, rewriting
the current branch history.
@example
git commit --amend
@end example
allows you to quickly amend the details of the last commit.
@example
git rebase -i origin/master
@end example
will replay local commits over the main repository, allowing you to edit,
merge or remove some of them in the process.
@float NOTE
@command{git reset}, @command{git commit --amend} and @command{git rebase}
rewrite history, so you should use them ONLY on your local or topic branches.
The main repository will reject those changes.
@end float
@example
git revert <commit>
@end example
@command{git revert} will generate a revert commit. This will not make the
faulty commit disappear from the history.
@section Pushing changes to remote trees
@example
git push
@end example
Will push the changes to the default remote (@var{origin}).
Git will prevent you from pushing changes if the local and remote trees are
out of sync. Refer to the sections above on updating the source tree and
rebasing your local branches to sync the local tree.
@example
git remote add <name> <url>
@end example
Will add an additional remote with a name reference; this is useful if you
want to push your local branch for review to a remote host.
@example
git push <remote> <refspec>
@end example
Will push the changes to the @var{<remote>} repository.
Omitting @var{<refspec>} makes @command{git push} update all the remote
branches matching the local ones.
@section Finding a specific svn revision
Since version 1.7.1 git supports the @var{:/foo} syntax for specifying commits
based on a regular expression; see @command{man gitrevisions}.
@example
git show :/'as revision 23456'
@end example
will show the svn changeset @var{r23456}. With older git versions searching in
the @command{git log} output is the easiest option (especially if a pager with
search capabilities is used).
This commit can be checked out with
@example
git checkout -b svn_23456 :/'as revision 23456'
@end example
or for git < 1.7.1 with
@example
git checkout -b svn_23456 $SHA1
@end example
where @var{$SHA1} is the commit hash from the @command{git log} output.
@chapter Server Issues
Contact the project admins @email{root@@ffmpeg.org} if you have technical
problems with the GIT server.


@ -42,7 +42,7 @@ specify card number or identifier, device number and subdevice number
To see the list of cards currently recognized by your system check the
files @file{/proc/asound/cards} and @file{/proc/asound/devices}.
For example to capture with @file{ffmpeg} from an ALSA device with
For example to capture with @command{ffmpeg} from an ALSA device with
card id 0, you may run the command:
@example
ffmpeg -f alsa -i hw:0 alsaout.wav
@ -167,14 +167,14 @@ For more detailed information read the file
Documentation/fb/framebuffer.txt included in the Linux source tree.
To record from the framebuffer device @file{/dev/fb0} with
@file{ffmpeg}:
@command{ffmpeg}:
@example
ffmpeg -f fbdev -r 10 -i /dev/fb0 out.avi
@end example
You can take a single screenshot image with the command:
@example
ffmpeg -f fbdev -vframes 1 -r 1 -i /dev/fb0 screenshot.jpeg
ffmpeg -f fbdev -frames:v 1 -r 1 -i /dev/fb0 screenshot.jpeg
@end example
See also @url{http://linux-fbdev.sourceforge.net/}, and fbset(1).
@ -204,7 +204,7 @@ To list the JACK clients and their properties you can invoke the command
@file{jack_lsp}.
Follows an example which shows how to capture a JACK readable client
with @file{ffmpeg}.
with @command{ffmpeg}.
@example
# Create a JACK writable client with name "ffmpeg".
$ ffmpeg -f jack -i ffmpeg -y out.wav
@ -395,7 +395,7 @@ The filename to provide to the input device is the device node
representing the OSS input device, and is usually set to
@file{/dev/dsp}.
For example to grab from @file{/dev/dsp} using @file{ffmpeg} use the
For example to grab from @file{/dev/dsp} using @command{ffmpeg} use the
command:
@example
ffmpeg -f oss -i /dev/dsp /tmp/oss.wav
@ -418,7 +418,7 @@ To list the pulse source devices and their properties you can invoke
the command @file{pactl list sources}.
@example
avconv -f pulse -i default /tmp/pulse.wav
ffmpeg -f pulse -i default /tmp/pulse.wav
@end example
@subsection @var{server} AVOption
@ -498,7 +498,7 @@ The filename to provide to the input device is the device node
representing the sndio input device, and is usually set to
@file{/dev/audio0}.
For example to grab from @file{/dev/audio0} using @file{ffmpeg} use the
For example to grab from @file{/dev/audio0} using @command{ffmpeg} use the
command:
@example
ffmpeg -f sndio -i /dev/audio0 /tmp/oss.wav
@ -582,7 +582,7 @@ Check the X11 documentation (e.g. man X) for more detailed information.
Use the @file{dpyinfo} program for getting basic information about the
properties of your X11 display (e.g. grep for "name" or "dimensions").
For example to grab from @file{:0.0} using @file{ffmpeg}:
For example to grab from @file{:0.0} using @command{ffmpeg}:
@example
ffmpeg -f x11grab -r 25 -s cif -i :0.0 out.mpg


@ -43,12 +43,12 @@ You can print the CRC to stdout with the command:
ffmpeg -i INPUT -f crc -
@end example
You can select the output format of each frame with @file{ffmpeg} by
You can select the output format of each frame with @command{ffmpeg} by
specifying the audio and video codec and format. For example to
compute the CRC of the input audio converted to PCM unsigned 8-bit
and the input video converted to MPEG-2 video, use the command:
@example
ffmpeg -i INPUT -acodec pcm_u8 -vcodec mpeg2video -f crc -
ffmpeg -i INPUT -c:a pcm_u8 -c:v mpeg2video -f crc -
@end example
See also the @ref{framecrc} muxer.
@ -79,13 +79,13 @@ You can print the CRC of each decoded frame to stdout with the command:
ffmpeg -i INPUT -f framecrc -
@end example
You can select the output format of each frame with @file{ffmpeg} by
You can select the output format of each frame with @command{ffmpeg} by
specifying the audio and video codec and format. For example, to
compute the CRC of each decoded input audio frame converted to PCM
unsigned 8-bit and of each decoded input video frame converted to
MPEG-2 video, use the command:
@example
ffmpeg -i INPUT -acodec pcm_u8 -vcodec mpeg2video -f framecrc -
ffmpeg -i INPUT -c:a pcm_u8 -c:v mpeg2video -f framecrc -
@end example
See also the @ref{crc} muxer.
@ -119,26 +119,26 @@ The pattern "img%%-%d.jpg" will specify a sequence of filenames of the
form @file{img%-1.jpg}, @file{img%-2.jpg}, ..., @file{img%-10.jpg},
etc.
The following example shows how to use @file{ffmpeg} for creating a
The following example shows how to use @command{ffmpeg} for creating a
sequence of files @file{img-001.jpeg}, @file{img-002.jpeg}, ...,
taking one image every second from the input video:
@example
ffmpeg -i in.avi -r 1 -f image2 'img-%03d.jpeg'
ffmpeg -i in.avi -vsync 1 -r 1 -f image2 'img-%03d.jpeg'
@end example
Note that with @file{ffmpeg}, if the format is not specified with the
Note that with @command{ffmpeg}, if the format is not specified with the
@code{-f} option and the output filename specifies an image file
format, the image2 muxer is automatically selected, so the previous
command can be written as:
@example
ffmpeg -i in.avi -r 1 'img-%03d.jpeg'
ffmpeg -i in.avi -vsync 1 -r 1 'img-%03d.jpeg'
@end example
Note also that the pattern must not necessarily contain "%d" or
"%0@var{N}d", for example to create a single image file
@file{img.jpeg} from the input video you can employ the command:
@example
ffmpeg -i in.avi -f image2 -vframes 1 img.jpeg
ffmpeg -i in.avi -f image2 -frames:v 1 img.jpeg
@end example
The image muxer supports the .Y.U.V image file format. This format is
@ -189,7 +189,7 @@ and @code{service_name}. If they are not set the default for
@code{service_name} is "Service01".
@example
ffmpeg -i file.mpg -acodec copy -vcodec copy \
ffmpeg -i file.mpg -c copy \
-mpegts_original_network_id 0x1122 \
-mpegts_transport_stream_id 0x3344 \
-mpegts_service_id 0x5566 \
@ -207,14 +207,14 @@ Null muxer.
This muxer does not generate any output file, it is mainly useful for
testing or benchmarking purposes.
For example to benchmark decoding with @file{ffmpeg} you can use the
For example to benchmark decoding with @command{ffmpeg} you can use the
command:
@example
ffmpeg -benchmark -i INPUT -f null out.null
@end example
Note that the above command does not read or write the @file{out.null}
file, but specifying the output file is required by the @file{ffmpeg}
file, but specifying the output file is required by the @command{ffmpeg}
syntax.
Alternatively you can write the command as:
@ -282,7 +282,7 @@ Both eyes laced in one Block, Right-eye view is first
For example a 3D WebM clip can be created using the following command line:
@example
ffmpeg -i sample_left_right_clip.mpg -an -vcodec libvpx -metadata stereo_mode=left_right -y stereo_clip.webm
ffmpeg -i sample_left_right_clip.mpg -an -c:v libvpx -metadata stereo_mode=left_right -y stereo_clip.webm
@end example
@c man end MUXERS

doc/platform.texi (new file, 387 lines)

@ -0,0 +1,387 @@
\input texinfo @c -*- texinfo -*-
@settitle Platform Specific information
@titlepage
@center @titlefont{Platform Specific information}
@end titlepage
@top
@contents
@chapter Unix-like
Some parts of FFmpeg cannot be built with version 2.15 of the GNU
assembler which is still provided by a few AMD64 distributions. To
make sure your compiler really uses the required version of gas
after a binutils upgrade, run:
@example
$(gcc -print-prog-name=as) --version
@end example
If not, then you should install a different compiler that has no
hard-coded path to gas. In the worst case pass @code{--disable-asm}
to configure.
@section BSD
BSD make will not build FFmpeg, you need to install and use GNU Make
(@file{gmake}).
@section (Open)Solaris
GNU Make is required to build FFmpeg, so you have to invoke @file{gmake};
standard Solaris Make will not work. When building with a non-C99 front-end
(gcc, generic suncc) add either @code{--extra-libs=/usr/lib/values-xpg6.o}
or @code{--extra-libs=/usr/lib/64/values-xpg6.o} to the configure options
since the libc is not c99-compliant by default. The probes performed by
configure may raise an exception leading to the death of configure itself
due to a bug in the system shell. Simply invoke a different shell such as
bash directly to work around this:
@example
bash ./configure
@end example
@anchor{Darwin}
@section Darwin (OSX, iPhone)
The toolchain provided with Xcode is sufficient to build the basic
unaccelerated code.
OSX on PowerPC or ARM (iPhone) requires a preprocessor from
@url{http://github.com/yuvi/gas-preprocessor} to build the optimized
assembler functions. Just download the Perl script and put it somewhere
in your PATH, FFmpeg's configure will pick it up automatically.
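A minimal sketch of doing that, assuming the script keeps its upstream name
@file{gas-preprocessor.pl} and that @file{/usr/local/bin} is in your PATH:
@example
git clone git://github.com/yuvi/gas-preprocessor.git
cp gas-preprocessor/gas-preprocessor.pl /usr/local/bin
chmod +x /usr/local/bin/gas-preprocessor.pl
@end example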
OSX on amd64 and x86 requires @command{yasm} to build most of the
optimized assembler functions. @url{http://mxcl.github.com/homebrew/, Homebrew},
@url{http://www.gentoo.org/proj/en/gentoo-alt/prefix/bootstrap-macos.xml, Gentoo Prefix}
or @url{http://www.macports.org, MacPorts} can easily provide it.
@chapter DOS
Using a cross-compiler is preferred for various reasons.
@url{http://www.delorie.com/howto/djgpp/linux-x-djgpp.html}
@chapter OS/2
For information about compiling FFmpeg on OS/2 see
@url{http://www.edm2.com/index.php/FFmpeg}.
@chapter Windows
To get help and instructions for building FFmpeg under Windows, check out
the FFmpeg Windows Help Forum at
@url{http://ffmpeg.arrozcru.org/}.
@section Native Windows compilation
FFmpeg can be built to run natively on Windows using the MinGW tools. Install
the latest versions of MSYS and MinGW from @url{http://www.mingw.org/}.
You can find detailed installation instructions in the download
section and the FAQ.
FFmpeg does not build out-of-the-box with the packages the automated MinGW
installer provides. It also requires coreutils to be installed and many other
packages updated to the latest version. The minimum version for some packages
are listed below:
@itemize
@item bash 3.1
@item msys-make 3.81-2 (note: not mingw32-make)
@item w32api 3.13
@item mingw-runtime 3.15
@end itemize
FFmpeg automatically passes @code{-fno-common} to the compiler to work around
a GCC bug (see @url{http://gcc.gnu.org/bugzilla/show_bug.cgi?id=37216}).
Notes:
@itemize
@item Building natively using MSYS can be sped up by disabling implicit rules
in the Makefile by calling @code{make -r} instead of plain @code{make}. This
speed up is close to non-existent for normal one-off builds and is only
noticeable when running make for a second time (for example in
@code{make install}).
@item In order to compile FFplay, you must have the MinGW development library
of @uref{http://www.libsdl.org/, SDL}.
Edit the @file{bin/sdl-config} script so that it points to the correct prefix
where SDL was installed. Verify that @file{sdl-config} can be launched from
the MSYS command line.
@item By using @code{./configure --enable-shared} when configuring FFmpeg,
you can build the FFmpeg libraries (e.g. libavutil, libavcodec,
libavformat) as DLLs.
@end itemize
@section Microsoft Visual C++ compatibility
As stated in the FAQ, FFmpeg will not compile under MSVC++. However, if you
want to use the libav* libraries in your own applications, you can still
compile those applications using MSVC++. But the libav* libraries you link
to @emph{must} be built with MinGW. However, you will not be able to debug
inside the libav* libraries, since MSVC++ does not recognize the debug
symbols generated by GCC.
We strongly recommend you to move over from MSVC++ to MinGW tools.
This description of how to use the FFmpeg libraries with MSVC++ is based on
Microsoft Visual C++ 2005 Express Edition. If you have a different version,
you might have to modify the procedures slightly.
@subsection Using static libraries
Assuming you have just built and installed FFmpeg in @file{/usr/local}.
@enumerate
@item Create a new console application ("File / New / Project") and then
select "Win32 Console Application". On the appropriate page of the
Application Wizard, uncheck the "Precompiled headers" option.
@item Write the source code for your application, or, for testing, just
copy the code from an existing sample application into the source file
that MSVC++ has already created for you. For example, you can copy
@file{libavformat/output-example.c} from the FFmpeg distribution.
@item Open the "Project / Properties" dialog box. In the "Configuration"
combo box, select "All Configurations" so that the changes you make will
affect both debug and release builds. In the tree view on the left hand
side, select "C/C++ / General", then edit the "Additional Include
Directories" setting to contain the path where the FFmpeg includes were
installed (i.e. @file{c:\msys\1.0\local\include}).
Do not add MinGW's include directory here, or the include files will
conflict with MSVC's.
@item Still in the "Project / Properties" dialog box, select
"Linker / General" from the tree view and edit the
"Additional Library Directories" setting to contain the @file{lib}
directory where FFmpeg was installed (i.e. @file{c:\msys\1.0\local\lib}),
the directory where MinGW libs are installed (i.e. @file{c:\mingw\lib}),
and the directory where MinGW's GCC libs are installed
(i.e. @file{C:\mingw\lib\gcc\mingw32\4.2.1-sjlj}). Then select
"Linker / Input" from the tree view, and add the files @file{libavformat.a},
@file{libavcodec.a}, @file{libavutil.a}, @file{libmingwex.a},
@file{libgcc.a}, and any other libraries you used (i.e. @file{libz.a})
to the end of "Additional Dependencies".
@item Now, select "C/C++ / Code Generation" from the tree view. Select
"Debug" in the "Configuration" combo box. Make sure that "Runtime
Library" is set to "Multi-threaded Debug DLL". Then, select "Release" in
the "Configuration" combo box and make sure that "Runtime Library" is
set to "Multi-threaded DLL".
@item Click "OK" to close the "Project / Properties" dialog box.
@item MSVC++ lacks some C99 header files that are fundamental for FFmpeg.
Get msinttypes from @url{http://code.google.com/p/msinttypes/downloads/list}
and install it in MSVC++'s include directory
(i.e. @file{C:\Program Files\Microsoft Visual Studio 8\VC\include}).
@item MSVC++ also does not understand the @code{inline} keyword used by
FFmpeg, so you must add this line before @code{#include}ing libav*:
@example
#define inline _inline
@end example
@item Build your application, everything should work.
@end enumerate
@subsection Using shared libraries
This is how to create DLL and LIB files that are compatible with MSVC++:
@enumerate
@item Add a call to @file{vcvars32.bat} (which sets up the environment
variables for the Visual C++ tools) as the first line of @file{msys.bat}.
The standard location for @file{vcvars32.bat} is
@file{C:\Program Files\Microsoft Visual Studio 8\VC\bin\vcvars32.bat},
and the standard location for @file{msys.bat} is @file{C:\msys\1.0\msys.bat}.
If this corresponds to your setup, add the following line as the first line
of @file{msys.bat}:
@example
call "C:\Program Files\Microsoft Visual Studio 8\VC\bin\vcvars32.bat"
@end example
Alternatively, you may start the @file{Visual Studio 2005 Command Prompt},
and run @file{c:\msys\1.0\msys.bat} from there.
@item Within the MSYS shell, run @code{lib.exe}. If you get a help message
from @file{Microsoft (R) Library Manager}, this means your environment
variables are set up correctly, the @file{Microsoft (R) Library Manager}
is on the path and will be used by FFmpeg to create
MSVC++-compatible import libraries.
@item Build FFmpeg with
@example
./configure --enable-shared
make
make install
@end example
Your install path (@file{/usr/local/} by default) should now have the
necessary DLL and LIB files under the @file{bin} directory.
Alternatively, build the libraries with a cross compiler, according to
the instructions below in @ref{Cross compilation for Windows with Linux}.
To use those files with MSVC++, do the same as you would do with
the static libraries, as described above. But in Step 4,
you should only need to add the directory where the LIB files are installed
(i.e. @file{c:\msys\usr\local\bin}). This is not a typo, the LIB files are
installed in the @file{bin} directory. And instead of adding the static
libraries (@file{libxxx.a} files) you should add the MSVC import libraries
(@file{avcodec.lib}, @file{avformat.lib}, and
@file{avutil.lib}). Note that you should not use the GCC import
libraries (@file{libxxx.dll.a} files), as these will give you undefined
reference errors. There should be no need for @file{libmingwex.a},
@file{libgcc.a}, and @file{wsock32.lib}, nor any other external library
statically linked into the DLLs.
FFmpeg headers do not declare global data for Windows DLLs through the usual
dllexport/dllimport interface. Such data will be exported properly while
building, but to use them in your MSVC++ code you will have to edit the
appropriate headers and mark the data as dllimport. For example, in
libavutil/pixdesc.h you should have:
@example
extern __declspec(dllimport) const AVPixFmtDescriptor av_pix_fmt_descriptors[];
@end example
Note that using import libraries created by dlltool requires
the linker optimization option to be set to
"References: Keep Unreferenced Data (@code{/OPT:NOREF})", otherwise
the resulting binaries will fail during runtime. This isn't
required when using import libraries generated by lib.exe.
This issue is reported upstream at
@url{http://sourceware.org/bugzilla/show_bug.cgi?id=12633}.
To create import libraries that work with the @code{/OPT:REF} option
(which is enabled by default in Release mode), follow these steps:
@enumerate
@item Open @file{Visual Studio 2005 Command Prompt}.
Alternatively, in a normal command line prompt, call @file{vcvars32.bat}
which sets up the environment variables for the Visual C++ tools
(the standard location for this file is
@file{C:\Program Files\Microsoft Visual Studio 8\VC\bin\vcvars32.bat}).
@item Enter the @file{bin} directory where the created LIB and DLL files
are stored.
@item Generate new import libraries with @file{lib.exe}:
@example
lib /machine:i386 /def:..\lib\avcodec-53.def /out:avcodec.lib
lib /machine:i386 /def:..\lib\avdevice-53.def /out:avdevice.lib
lib /machine:i386 /def:..\lib\avfilter-2.def /out:avfilter.lib
lib /machine:i386 /def:..\lib\avformat-53.def /out:avformat.lib
lib /machine:i386 /def:..\lib\avutil-51.def /out:avutil.lib
lib /machine:i386 /def:..\lib\swscale-2.def /out:swscale.lib
@end example
@end enumerate
@anchor{Cross compilation for Windows with Linux}
@section Cross compilation for Windows with Linux
You must use the MinGW cross compilation tools available at
@url{http://www.mingw.org/}.
Then configure FFmpeg with the following options:
@example
./configure --target-os=mingw32 --cross-prefix=i386-mingw32msvc-
@end example
(you can change the cross-prefix according to the prefix chosen for the
MinGW tools).
Then you can easily test FFmpeg with @uref{http://www.winehq.com/, Wine}.
@section Compilation under Cygwin
Please use Cygwin 1.7.x as the obsolete 1.5.x Cygwin versions lack
llrint() in its C library.
Install your Cygwin with all the "Base" packages, plus the
following "Devel" ones:
@example
binutils, gcc4-core, make, git, mingw-runtime, texi2html
@end example
And the following "Utils" one:
@example
diffutils
@end example
Then run
@example
./configure
@end example
to make a static build.
The current @code{gcc4-core} package is buggy and needs this flag to build
shared libraries:
@example
./configure --enable-shared --disable-static --extra-cflags=-fno-reorder-functions
@end example
If you want to build FFmpeg with additional libraries, download Cygwin
"Devel" packages for Ogg and Vorbis from any Cygwin packages repository:
@example
libogg-devel, libvorbis-devel
@end example
These library packages are only available from
@uref{http://sourceware.org/cygwinports/, Cygwin Ports}:
@example
yasm, libSDL-devel, libdirac-devel, libfaac-devel, libaacplus-devel, libgsm-devel,
libmp3lame-devel, libschroedinger1.0-devel, speex-devel, libtheora-devel,
libxvidcore-devel
@end example
The recommendation for libnut and x264 is to build them from source by
yourself, as they evolve too quickly for Cygwin Ports to be up to date.
Cygwin 1.7.x has IPv6 support. You can add IPv6 to Cygwin 1.5.x by means
of the @code{libgetaddrinfo-devel} package, available at Cygwin Ports.
@section Cross compilation for Windows under Cygwin
With Cygwin you can create Windows binaries that do not need the cygwin1.dll.
Just install your Cygwin as explained before, plus these additional
"Devel" packages:
@example
gcc-mingw-core, mingw-runtime, mingw-zlib
@end example
and add some special flags to your configure invocation.
For a static build run
@example
./configure --target-os=mingw32 --extra-cflags=-mno-cygwin --extra-libs=-mno-cygwin
@end example
and for a build with shared libraries
@example
./configure --target-os=mingw32 --enable-shared --disable-static --extra-cflags=-mno-cygwin --extra-libs=-mno-cygwin
@end example
@bye


@ -67,7 +67,7 @@ File access protocol.
Allow to read from or read to a file.
For example to read from a file @file{input.mpeg} with @file{ffmpeg}
For example to read from a file @file{input.mpeg} with @command{ffmpeg}
use the command:
@example
ffmpeg -i file:input.mpeg output.mpeg
@ -134,14 +134,14 @@ pipe (e.g. 0 for stdin, 1 for stdout, 2 for stderr). If @var{number}
is not specified, by default the stdout file descriptor will be used
for writing, stdin for reading.
For example to read from stdin with @file{ffmpeg}:
For example to read from stdin with @command{ffmpeg}:
@example
cat test.wav | ffmpeg -i pipe:0
# ...this is the same as...
cat test.wav | ffmpeg -i pipe:
@end example
For writing to stdout with @file{ffmpeg}:
For writing to stdout with @command{ffmpeg}:
@example
ffmpeg -i test.wav -f avi pipe:1 | cat > test.avi
# ...this is the same as...
@ -219,7 +219,7 @@ meaning as specified for the RTMP native protocol.
See the librtmp manual page (man 3 librtmp) for more information.
For example, to stream a file in real-time to an RTMP server using
@file{ffmpeg}:
@command{ffmpeg}:
@example
ffmpeg -re -i myfile -f flv rtmp://myserver/live/mystream
@end example
@ -249,7 +249,7 @@ The required syntax for a RTSP url is:
rtsp://@var{hostname}[:@var{port}]/@var{path}
@end example
The following options (set on the @file{ffmpeg}/@file{ffplay} command
The following options (set on the @command{ffmpeg}/@file{ffplay} command
line, or set in code via @code{AVOption}s or in @code{avformat_open_input}),
are supported:
@ -472,7 +472,7 @@ For receiving, this gives the benefit of only receiving packets from
the specified peer address/port.
@end table
Some usage examples of the udp protocol with @file{ffmpeg} follow.
Some usage examples of the udp protocol with @command{ffmpeg} follow.
To stream over UDP to a remote endpoint:
@example

@ -9,6 +9,63 @@ $EXTRA_HEAD =
<link rel="stylesheet" type="text/css" href="default.css" />
';
$CSS_LINES = <<EOT;
<style type="text/css">
<!--
a.summary-letter { text-decoration: none }
a { color: #2D6198; }
a:visited { color: #884488; }
h1 a, h2 a, h3 a { text-decoration: inherit; color: inherit; }
p { margin-left: 1em; margin-right: 1em; }
table { margin-left: 2em; }
pre { margin-left: 2em; }
#footer { text-align: center; }
#body { margin-left: 1em; margin-right: 1em; }
body { background-color: #313131; margin: 0; }
#container {
background-color: white;
color: #202020;
margin-left: 1em;
margin-right: 1em;
}
h1 {
background-color: #7BB37B;
border: 1px solid #6A996A;
color: #151515;
font-size: 1.2em;
padding-bottom: 0.2em;
padding-left: 0.4em;
padding-top: 0.2em;
}
h2 {
color: #313131;
font-size: 1.2em;
}
h3 {
color: #313131;
font-size: 0.8em;
margin-bottom: -8px;
}
.note {
margin: 1em;
border: 1px solid #bbc9d8;
background-color: #dde1e1;
}
.important {
margin: 1em;
border: 1px solid #d26767;
background-color: #f8e1e1;
}
-->
</style>
EOT
my $FFMPEG_NAVBAR = $ENV{"FFMPEG_NAVBAR"} || '';
@ -31,6 +88,35 @@ sub FFmpeg_print_page_foot($$)
print $fh "</div>\n";
}
$float = \&FFmpeg_float;
sub FFmpeg_float($$$$)
{
my $text = shift;
my $float = shift;
my $caption = shift;
my $shortcaption = shift;
my $label = '';
if (exists($float->{'id'}))
{
$label = &$anchor($float->{'id'});
}
my $class = '';
my $subject = '';
if ($caption =~ /NOTE/)
{
$class = "note";
}
elsif ($caption =~ /IMPORTANT/)
{
$class = "important";
}
return '<div class="float ' . $class . '">' . "$label\n" . $text . '</div>';
}
$print_page_head = \&FFmpeg_print_page_head;
sub FFmpeg_print_page_head($$)
{

@ -148,9 +148,10 @@ OBJS-$(CONFIG_MLP_MUXER) += rawenc.o
OBJS-$(CONFIG_MM_DEMUXER) += mm.o
OBJS-$(CONFIG_MMF_DEMUXER) += mmf.o pcm.o
OBJS-$(CONFIG_MMF_MUXER) += mmf.o riff.o
OBJS-$(CONFIG_MOV_DEMUXER) += mov.o riff.o isom.o
OBJS-$(CONFIG_MOV_DEMUXER) += mov.o riff.o isom.o mov_chan.o
OBJS-$(CONFIG_MOV_MUXER) += movenc.o riff.o isom.o avc.o \
movenchint.o rtpenc_chain.o
movenchint.o rtpenc_chain.o \
mov_chan.o
OBJS-$(CONFIG_MP2_MUXER) += mp3enc.o rawenc.o
OBJS-$(CONFIG_MP3_DEMUXER) += mp3dec.o
OBJS-$(CONFIG_MP3_MUXER) += mp3enc.o rawenc.o id3v2enc.o

@ -224,64 +224,49 @@ const AVCodecTag codec_movvideo_tags[] = {
};
const AVCodecTag codec_movaudio_tags[] = {
{ CODEC_ID_PCM_S32BE, MKTAG('i', 'n', '3', '2') },
{ CODEC_ID_PCM_S32LE, MKTAG('i', 'n', '3', '2') },
{ CODEC_ID_PCM_S24BE, MKTAG('i', 'n', '2', '4') },
{ CODEC_ID_PCM_S24LE, MKTAG('i', 'n', '2', '4') },
{ CODEC_ID_PCM_S16BE, MKTAG('t', 'w', 'o', 's') }, /* 16 bits */
{ CODEC_ID_PCM_S16LE, MKTAG('s', 'o', 'w', 't') }, /* */
{ CODEC_ID_PCM_S16LE, MKTAG('l', 'p', 'c', 'm') },
{ CODEC_ID_PCM_F32BE, MKTAG('f', 'l', '3', '2') },
{ CODEC_ID_PCM_F32LE, MKTAG('f', 'l', '3', '2') },
{ CODEC_ID_PCM_F64BE, MKTAG('f', 'l', '6', '4') },
{ CODEC_ID_PCM_F64LE, MKTAG('f', 'l', '6', '4') },
{ CODEC_ID_PCM_S8, MKTAG('s', 'o', 'w', 't') },
{ CODEC_ID_PCM_U8, MKTAG('r', 'a', 'w', ' ') }, /* 8 bits unsigned */
{ CODEC_ID_PCM_U8, MKTAG('N', 'O', 'N', 'E') }, /* uncompressed */
{ CODEC_ID_PCM_MULAW, MKTAG('u', 'l', 'a', 'w') }, /* */
{ CODEC_ID_PCM_ALAW, MKTAG('a', 'l', 'a', 'w') }, /* */
{ CODEC_ID_ADPCM_IMA_QT, MKTAG('i', 'm', 'a', '4') }, /* IMA-4 ADPCM */
{ CODEC_ID_MACE3, MKTAG('M', 'A', 'C', '3') }, /* Macintosh Audio Compression and Expansion 3:1 */
{ CODEC_ID_MACE6, MKTAG('M', 'A', 'C', '6') }, /* Macintosh Audio Compression and Expansion 6:1 */
{ CODEC_ID_MP1, MKTAG('.', 'm', 'p', '1') }, /* MPEG layer 1 */
{ CODEC_ID_MP2, MKTAG('.', 'm', 'p', '2') }, /* MPEG layer 2 */
{ CODEC_ID_MP3, MKTAG('.', 'm', 'p', '3') }, /* MPEG layer 3 */ /* sample files at http://www.3ivx.com/showcase.html use this tag */
{ CODEC_ID_MP3, 0x6D730055 }, /* MPEG layer 3 */
/* { CODEC_ID_OGG_VORBIS, MKTAG('O', 'g', 'g', 'S') }, *//* sample files at http://heroinewarrior.com/xmovie.php3 use this tag */
{ CODEC_ID_AAC, MKTAG('m', 'p', '4', 'a') }, /* MPEG-4 AAC */
{ CODEC_ID_AC3, MKTAG('a', 'c', '-', '3') }, /* ETSI TS 102 366 Annex F */
{ CODEC_ID_AC3, MKTAG('s', 'a', 'c', '3') }, /* Nero Recode */
{ CODEC_ID_DTS, MKTAG('d', 't', 's', 'c') }, /* mp4ra.org */
{ CODEC_ID_DTS, MKTAG('D', 'T', 'S', ' ') }, /* non-standard */
{ CODEC_ID_EAC3, MKTAG('e', 'c', '-', '3') }, /* ETSI TS 102 366 Annex F */
{ CODEC_ID_AMR_NB, MKTAG('s', 'a', 'm', 'r') }, /* AMR-NB 3gp */
{ CODEC_ID_AMR_WB, MKTAG('s', 'a', 'w', 'b') }, /* AMR-WB 3gp */
{ CODEC_ID_GSM, MKTAG('a', 'g', 's', 'm') },
{ CODEC_ID_NELLYMOSER, MKTAG('n', 'm', 'o', 's') }, /* Flash Media Server */
{ CODEC_ID_ALAC, MKTAG('a', 'l', 'a', 'c') }, /* Apple Lossless */
{ CODEC_ID_QCELP, MKTAG('Q','c','l','p') },
{ CODEC_ID_QCELP, MKTAG('Q','c','l','q') },
{ CODEC_ID_QCELP, MKTAG('s','q','c','p') }, /* ISO Media fourcc */
{ CODEC_ID_QDMC, MKTAG('Q', 'D', 'M', 'C') }, /* QDMC */
{ CODEC_ID_QDM2, MKTAG('Q', 'D', 'M', '2') }, /* QDM2 */
{ CODEC_ID_DVAUDIO, MKTAG('v', 'd', 'v', 'a') },
{ CODEC_ID_DVAUDIO, MKTAG('d', 'v', 'c', 'a') },
{ CODEC_ID_SPEEX, MKTAG('s','p','e','x') }, /* Flash Media Server */
{ CODEC_ID_WMAV2, MKTAG('W', 'M', 'A', '2') },
{ CODEC_ID_AAC, MKTAG('m', 'p', '4', 'a') },
{ CODEC_ID_AC3, MKTAG('a', 'c', '-', '3') }, /* ETSI TS 102 366 Annex F */
{ CODEC_ID_AC3, MKTAG('s', 'a', 'c', '3') }, /* Nero Recode */
{ CODEC_ID_ADPCM_IMA_QT, MKTAG('i', 'm', 'a', '4') },
{ CODEC_ID_ALAC, MKTAG('a', 'l', 'a', 'c') },
{ CODEC_ID_AMR_NB, MKTAG('s', 'a', 'm', 'r') }, /* AMR-NB 3gp */
{ CODEC_ID_AMR_WB, MKTAG('s', 'a', 'w', 'b') }, /* AMR-WB 3gp */
{ CODEC_ID_DTS, MKTAG('d', 't', 's', 'c') }, /* mp4ra.org */
{ CODEC_ID_DTS, MKTAG('D', 'T', 'S', ' ') }, /* non-standard */
{ CODEC_ID_DVAUDIO, MKTAG('v', 'd', 'v', 'a') },
{ CODEC_ID_DVAUDIO, MKTAG('d', 'v', 'c', 'a') },
{ CODEC_ID_EAC3, MKTAG('e', 'c', '-', '3') }, /* ETSI TS 102 366 Annex F */
{ CODEC_ID_GSM, MKTAG('a', 'g', 's', 'm') },
{ CODEC_ID_MACE3, MKTAG('M', 'A', 'C', '3') },
{ CODEC_ID_MACE6, MKTAG('M', 'A', 'C', '6') },
{ CODEC_ID_MP1, MKTAG('.', 'm', 'p', '1') },
{ CODEC_ID_MP2, MKTAG('.', 'm', 'p', '2') },
{ CODEC_ID_MP3, MKTAG('.', 'm', 'p', '3') },
{ CODEC_ID_MP3, 0x6D730055 },
{ CODEC_ID_NELLYMOSER, MKTAG('n', 'm', 'o', 's') }, /* Flash Media Server */
{ CODEC_ID_PCM_ALAW, MKTAG('a', 'l', 'a', 'w') },
{ CODEC_ID_PCM_F32BE, MKTAG('f', 'l', '3', '2') },
{ CODEC_ID_PCM_F32LE, MKTAG('f', 'l', '3', '2') },
{ CODEC_ID_PCM_F64BE, MKTAG('f', 'l', '6', '4') },
{ CODEC_ID_PCM_F64LE, MKTAG('f', 'l', '6', '4') },
{ CODEC_ID_PCM_MULAW, MKTAG('u', 'l', 'a', 'w') },
{ CODEC_ID_PCM_S16BE, MKTAG('t', 'w', 'o', 's') },
{ CODEC_ID_PCM_S16LE, MKTAG('s', 'o', 'w', 't') },
{ CODEC_ID_PCM_S16LE, MKTAG('l', 'p', 'c', 'm') },
{ CODEC_ID_PCM_S24BE, MKTAG('i', 'n', '2', '4') },
{ CODEC_ID_PCM_S24LE, MKTAG('i', 'n', '2', '4') },
{ CODEC_ID_PCM_S32BE, MKTAG('i', 'n', '3', '2') },
{ CODEC_ID_PCM_S32LE, MKTAG('i', 'n', '3', '2') },
{ CODEC_ID_PCM_S8, MKTAG('s', 'o', 'w', 't') },
{ CODEC_ID_PCM_U8, MKTAG('r', 'a', 'w', ' ') },
{ CODEC_ID_PCM_U8, MKTAG('N', 'O', 'N', 'E') },
{ CODEC_ID_QCELP, MKTAG('Q', 'c', 'l', 'p') },
{ CODEC_ID_QCELP, MKTAG('Q', 'c', 'l', 'q') },
{ CODEC_ID_QCELP, MKTAG('s', 'q', 'c', 'p') }, /* ISO Media fourcc */
{ CODEC_ID_QDM2, MKTAG('Q', 'D', 'M', '2') },
{ CODEC_ID_QDMC, MKTAG('Q', 'D', 'M', 'C') },
{ CODEC_ID_SPEEX, MKTAG('s', 'p', 'e', 'x') }, /* Flash Media Server */
{ CODEC_ID_WMAV2, MKTAG('W', 'M', 'A', '2') },
{ CODEC_ID_NONE, 0 },
};
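As an aside on the table above: each tag is a fourcc packed least-significant-byte first. Below is a minimal standalone sketch of that packing; the local TAG macro mirrors libavutil's MKTAG and is reproduced here only so the example compiles on its own.

#include <inttypes.h>
#include <stdio.h>

/* Local stand-in for libavutil's MKTAG: first character in the low byte. */
#define TAG(a, b, c, d) ((uint32_t)(a) | ((uint32_t)(b) << 8) | \
                         ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24))

int main(void)
{
    uint32_t in24 = TAG('i', 'n', '2', '4');   /* tag used for PCM_S24BE/LE above */
    printf("in24 -> 0x%08" PRIx32 "\n", in24); /* prints 0x34326e69               */
    return 0;
}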

@ -37,6 +37,7 @@
#include "isom.h"
#include "libavcodec/get_bits.h"
#include "id3v1.h"
#include "mov_chan.h"
#if CONFIG_ZLIB
#include <zlib.h>
@ -568,6 +569,51 @@ static int mov_read_dac3(MOVContext *c, AVIOContext *pb, MOVAtom atom)
return 0;
}
static int mov_read_chan(MOVContext *c, AVIOContext *pb, MOVAtom atom)
{
AVStream *st;
uint8_t version;
uint32_t flags, layout_tag, bitmap, num_descr;
if (c->fc->nb_streams < 1)
return 0;
st = c->fc->streams[c->fc->nb_streams-1];
if (atom.size < 16)
return 0;
version = avio_r8(pb);
flags = avio_rb24(pb);
layout_tag = avio_rb32(pb);
bitmap = avio_rb32(pb);
num_descr = avio_rb32(pb);
if (atom.size < 16ULL + num_descr * 20ULL)
return 0;
av_dlog(c->fc, "chan: size=%ld version=%u flags=%u layout=%u bitmap=%u num_descr=%u\n",
atom.size, version, flags, layout_tag, bitmap, num_descr);
#if 0
/* TODO: use the channel descriptions if the layout tag is 0 */
int i;
for (i = 0; i < num_descr; i++) {
uint32_t label, cflags;
float coords[3];
label = avio_rb32(pb); // mChannelLabel
cflags = avio_rb32(pb); // mChannelFlags
AV_WN32(&coords[0], avio_rl32(pb)); // mCoordinates[0]
AV_WN32(&coords[1], avio_rl32(pb)); // mCoordinates[1]
AV_WN32(&coords[2], avio_rl32(pb)); // mCoordinates[2]
}
#endif
st->codec->channel_layout = ff_mov_get_channel_layout(layout_tag, bitmap);
return 0;
}
static int mov_read_wfex(MOVContext *c, AVIOContext *pb, MOVAtom atom)
{
AVStream *st;
@ -2346,7 +2392,7 @@ static int mov_read_elst(MOVContext *c, AVIOContext *pb, MOVAtom atom)
return 0;
}
static int mov_read_chan(MOVContext *c, AVIOContext *pb, MOVAtom atom)
static int mov_read_chan2(MOVContext *c, AVIOContext *pb, MOVAtom atom)
{
if (atom.size < 16)
return 0;
@ -2409,7 +2455,7 @@ static const MOVParseTableEntry mov_default_parse_table[] = {
{ MKTAG('w','i','d','e'), mov_read_wide }, /* place holder */
{ MKTAG('w','f','e','x'), mov_read_wfex },
{ MKTAG('c','m','o','v'), mov_read_cmov },
{ MKTAG('c','h','a','n'), mov_read_chan },
{ MKTAG('c','h','a','n'), mov_read_chan }, /* channel layout */
{ 0, NULL }
};
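For reference, mov_read_chan() above consumes the 'chan' atom payload as an 8-bit version, 24-bit flags, then the big-endian 32-bit CoreAudio fields mChannelLayoutTag, mChannelBitmap and mNumberChannelDescriptions. The following standalone sketch parses the same fields from a hypothetical in-memory payload; rb32() stands in for avio_rb32() and the sample bytes are made up for illustration.

#include <inttypes.h>
#include <stdio.h>

/* Big-endian 32-bit read, standing in for avio_rb32(). */
static uint32_t rb32(const uint8_t *p)
{
    return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
           ((uint32_t)p[2] <<  8) |  (uint32_t)p[3];
}

int main(void)
{
    /* Hypothetical 'chan' payload: version 0, flags 0, layout tag
     * (121 << 16) | 6, no bitmap, no channel descriptions. */
    static const uint8_t chan[16] = {
        0x00, 0x00, 0x00, 0x00,   /* version (1 byte) + flags (3 bytes) */
        0x00, 0x79, 0x00, 0x06,   /* mChannelLayoutTag                  */
        0x00, 0x00, 0x00, 0x00,   /* mChannelBitmap                     */
        0x00, 0x00, 0x00, 0x00,   /* mNumberChannelDescriptions         */
    };
    uint8_t  version    = chan[0];
    uint32_t flags      = rb32(chan) & 0xFFFFFF;
    uint32_t layout_tag = rb32(chan +  4);
    uint32_t bitmap     = rb32(chan +  8);
    uint32_t num_descr  = rb32(chan + 12);

    printf("version=%u flags=%" PRIu32 " tag=0x%08" PRIx32
           " bitmap=%" PRIu32 " descr=%" PRIu32 "\n",
           version, flags, layout_tag, bitmap, num_descr);
    return 0;
}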

libavformat/mov_chan.c (new file, 505 lines)
@ -0,0 +1,505 @@
/*
* Copyright (c) 2011 Justin Ruggles
*
* This file is part of Libav.
*
* Libav is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* Libav is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Libav; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* mov 'chan' tag reading/writing.
* @author Justin Ruggles
*/
#include <stdint.h>
#include "libavutil/audioconvert.h"
#include "libavcodec/avcodec.h"
#include "mov_chan.h"
/**
* Channel Layout Tag
* This tells which channels are present in the audio stream and the order in
* which they appear.
*
* @note We're using the channel layout tag to indicate channel order
* when the value is greater than 0x10000. The Apple documentation has
* some contradictions as to how this is actually supposed to be handled.
*
* Core Audio File Format Spec:
* "The high 16 bits indicates a specific ordering of the channels."
* Core Audio Data Types Reference:
* "These identifiers specify the channels included in a layout but
* do not specify a particular ordering of those channels."
*/
enum MovChannelLayoutTag {
MOV_CH_LAYOUT_UNKNOWN = 0xFFFF0000,
MOV_CH_LAYOUT_USE_DESCRIPTIONS = ( 0 << 16) | 0,
MOV_CH_LAYOUT_USE_BITMAP = ( 1 << 16) | 0,
MOV_CH_LAYOUT_DISCRETEINORDER = (147 << 16) | 0,
MOV_CH_LAYOUT_MONO = (100 << 16) | 1,
MOV_CH_LAYOUT_STEREO = (101 << 16) | 2,
MOV_CH_LAYOUT_STEREOHEADPHONES = (102 << 16) | 2,
MOV_CH_LAYOUT_MATRIXSTEREO = (103 << 16) | 2,
MOV_CH_LAYOUT_MIDSIDE = (104 << 16) | 2,
MOV_CH_LAYOUT_XY = (105 << 16) | 2,
MOV_CH_LAYOUT_BINAURAL = (106 << 16) | 2,
MOV_CH_LAYOUT_AMBISONIC_B_FORMAT = (107 << 16) | 4,
MOV_CH_LAYOUT_QUADRAPHONIC = (108 << 16) | 4,
MOV_CH_LAYOUT_PENTAGONAL = (109 << 16) | 5,
MOV_CH_LAYOUT_HEXAGONAL = (110 << 16) | 6,
MOV_CH_LAYOUT_OCTAGONAL = (111 << 16) | 8,
MOV_CH_LAYOUT_CUBE = (112 << 16) | 8,
MOV_CH_LAYOUT_MPEG_3_0_A = (113 << 16) | 3,
MOV_CH_LAYOUT_MPEG_3_0_B = (114 << 16) | 3,
MOV_CH_LAYOUT_MPEG_4_0_A = (115 << 16) | 4,
MOV_CH_LAYOUT_MPEG_4_0_B = (116 << 16) | 4,
MOV_CH_LAYOUT_MPEG_5_0_A = (117 << 16) | 5,
MOV_CH_LAYOUT_MPEG_5_0_B = (118 << 16) | 5,
MOV_CH_LAYOUT_MPEG_5_0_C = (119 << 16) | 5,
MOV_CH_LAYOUT_MPEG_5_0_D = (120 << 16) | 5,
MOV_CH_LAYOUT_MPEG_5_1_A = (121 << 16) | 6,
MOV_CH_LAYOUT_MPEG_5_1_B = (122 << 16) | 6,
MOV_CH_LAYOUT_MPEG_5_1_C = (123 << 16) | 6,
MOV_CH_LAYOUT_MPEG_5_1_D = (124 << 16) | 6,
MOV_CH_LAYOUT_MPEG_6_1_A = (125 << 16) | 7,
MOV_CH_LAYOUT_MPEG_7_1_A = (126 << 16) | 8,
MOV_CH_LAYOUT_MPEG_7_1_B = (127 << 16) | 8,
MOV_CH_LAYOUT_MPEG_7_1_C = (128 << 16) | 8,
MOV_CH_LAYOUT_EMAGIC_DEFAULT_7_1 = (129 << 16) | 8,
MOV_CH_LAYOUT_SMPTE_DTV = (130 << 16) | 8,
MOV_CH_LAYOUT_ITU_2_1 = (131 << 16) | 3,
MOV_CH_LAYOUT_ITU_2_2 = (132 << 16) | 4,
MOV_CH_LAYOUT_DVD_4 = (133 << 16) | 3,
MOV_CH_LAYOUT_DVD_5 = (134 << 16) | 4,
MOV_CH_LAYOUT_DVD_6 = (135 << 16) | 5,
MOV_CH_LAYOUT_DVD_10 = (136 << 16) | 4,
MOV_CH_LAYOUT_DVD_11 = (137 << 16) | 5,
MOV_CH_LAYOUT_DVD_18 = (138 << 16) | 5,
MOV_CH_LAYOUT_AUDIOUNIT_6_0 = (139 << 16) | 6,
MOV_CH_LAYOUT_AUDIOUNIT_7_0 = (140 << 16) | 7,
MOV_CH_LAYOUT_AUDIOUNIT_7_0_FRONT = (148 << 16) | 7,
MOV_CH_LAYOUT_AAC_6_0 = (141 << 16) | 6,
MOV_CH_LAYOUT_AAC_6_1 = (142 << 16) | 7,
MOV_CH_LAYOUT_AAC_7_0 = (143 << 16) | 7,
MOV_CH_LAYOUT_AAC_OCTAGONAL = (144 << 16) | 8,
MOV_CH_LAYOUT_TMH_10_2_STD = (145 << 16) | 16,
MOV_CH_LAYOUT_TMH_10_2_FULL = (146 << 16) | 21,
MOV_CH_LAYOUT_AC3_1_0_1 = (149 << 16) | 2,
MOV_CH_LAYOUT_AC3_3_0 = (150 << 16) | 3,
MOV_CH_LAYOUT_AC3_3_1 = (151 << 16) | 4,
MOV_CH_LAYOUT_AC3_3_0_1 = (152 << 16) | 4,
MOV_CH_LAYOUT_AC3_2_1_1 = (153 << 16) | 4,
MOV_CH_LAYOUT_AC3_3_1_1 = (154 << 16) | 5,
MOV_CH_LAYOUT_EAC3_6_0_A = (155 << 16) | 6,
MOV_CH_LAYOUT_EAC3_7_0_A = (156 << 16) | 7,
MOV_CH_LAYOUT_EAC3_6_1_A = (157 << 16) | 7,
MOV_CH_LAYOUT_EAC3_6_1_B = (158 << 16) | 7,
MOV_CH_LAYOUT_EAC3_6_1_C = (159 << 16) | 7,
MOV_CH_LAYOUT_EAC3_7_1_A = (160 << 16) | 8,
MOV_CH_LAYOUT_EAC3_7_1_B = (161 << 16) | 8,
MOV_CH_LAYOUT_EAC3_7_1_C = (162 << 16) | 8,
MOV_CH_LAYOUT_EAC3_7_1_D = (163 << 16) | 8,
MOV_CH_LAYOUT_EAC3_7_1_E = (164 << 16) | 8,
MOV_CH_LAYOUT_EAC3_7_1_F = (165 << 16) | 8,
MOV_CH_LAYOUT_EAC3_7_1_G = (166 << 16) | 8,
MOV_CH_LAYOUT_EAC3_7_1_H = (167 << 16) | 8,
MOV_CH_LAYOUT_DTS_3_1 = (168 << 16) | 4,
MOV_CH_LAYOUT_DTS_4_1 = (169 << 16) | 5,
MOV_CH_LAYOUT_DTS_6_0_A = (170 << 16) | 6,
MOV_CH_LAYOUT_DTS_6_0_B = (171 << 16) | 6,
MOV_CH_LAYOUT_DTS_6_0_C = (172 << 16) | 6,
MOV_CH_LAYOUT_DTS_6_1_A = (173 << 16) | 7,
MOV_CH_LAYOUT_DTS_6_1_B = (174 << 16) | 7,
MOV_CH_LAYOUT_DTS_6_1_C = (175 << 16) | 7,
MOV_CH_LAYOUT_DTS_6_1_D = (182 << 16) | 7,
MOV_CH_LAYOUT_DTS_7_0 = (176 << 16) | 7,
MOV_CH_LAYOUT_DTS_7_1 = (177 << 16) | 8,
MOV_CH_LAYOUT_DTS_8_0_A = (178 << 16) | 8,
MOV_CH_LAYOUT_DTS_8_0_B = (179 << 16) | 8,
MOV_CH_LAYOUT_DTS_8_1_A = (180 << 16) | 9,
MOV_CH_LAYOUT_DTS_8_1_B = (181 << 16) | 9,
};
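To restate the packing used by the enum above: the high 16 bits carry the layout identifier and the low 16 bits carry the channel count. A tiny standalone check, using the value of MOV_CH_LAYOUT_MPEG_5_1_A from the enum:

#include <inttypes.h>
#include <stdio.h>

int main(void)
{
    uint32_t tag = (121u << 16) | 6;         /* MOV_CH_LAYOUT_MPEG_5_1_A */
    printf("id=%" PRIu32 " channels=%" PRIu32 "\n",
           tag >> 16, tag & 0xFFFF);         /* prints id=121 channels=6 */
    return 0;
}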
struct MovChannelLayoutMap {
uint32_t tag;
uint64_t layout;
};
static const struct MovChannelLayoutMap mov_ch_layout_map_misc[] = {
{ MOV_CH_LAYOUT_USE_DESCRIPTIONS, 0 },
{ MOV_CH_LAYOUT_USE_BITMAP, 0 },
{ MOV_CH_LAYOUT_DISCRETEINORDER, 0 },
{ MOV_CH_LAYOUT_UNKNOWN, 0 },
{ MOV_CH_LAYOUT_TMH_10_2_STD, 0 }, // L, R, C, Vhc, Lsd, Rsd,
// Ls, Rs, Vhl, Vhr, Lw, Rw,
// Csd, Cs, LFE1, LFE2
{ MOV_CH_LAYOUT_TMH_10_2_FULL, 0 }, // L, R, C, Vhc, Lsd, Rsd,
// Ls, Rs, Vhl, Vhr, Lw, Rw,
// Csd, Cs, LFE1, LFE2, Lc, Rc,
// HI, VI, Haptic
{ 0, 0 },
};
static const struct MovChannelLayoutMap mov_ch_layout_map_1ch[] = {
{ MOV_CH_LAYOUT_MONO, AV_CH_LAYOUT_MONO }, // C
};
static const struct MovChannelLayoutMap mov_ch_layout_map_2ch[] = {
{ MOV_CH_LAYOUT_STEREO, AV_CH_LAYOUT_STEREO }, // L, R
{ MOV_CH_LAYOUT_STEREOHEADPHONES, AV_CH_LAYOUT_STEREO }, // L, R
{ MOV_CH_LAYOUT_BINAURAL, AV_CH_LAYOUT_STEREO }, // L, R
{ MOV_CH_LAYOUT_MIDSIDE, AV_CH_LAYOUT_STEREO }, // C, sides
{ MOV_CH_LAYOUT_XY, AV_CH_LAYOUT_STEREO }, // X (left), Y (right)
{ MOV_CH_LAYOUT_MATRIXSTEREO, AV_CH_LAYOUT_STEREO_DOWNMIX }, // Lt, Rt
{ MOV_CH_LAYOUT_AC3_1_0_1, AV_CH_LAYOUT_MONO | // C, LFE
AV_CH_LOW_FREQUENCY },
{ 0, 0 },
};
static const struct MovChannelLayoutMap mov_ch_layout_map_3ch[] = {
{ MOV_CH_LAYOUT_MPEG_3_0_A, AV_CH_LAYOUT_SURROUND }, // L, R, C
{ MOV_CH_LAYOUT_MPEG_3_0_B, AV_CH_LAYOUT_SURROUND }, // C, L, R
{ MOV_CH_LAYOUT_AC3_3_0, AV_CH_LAYOUT_SURROUND }, // L, C, R
{ MOV_CH_LAYOUT_ITU_2_1, AV_CH_LAYOUT_2_1 }, // L, R, Cs
{ MOV_CH_LAYOUT_DVD_4, AV_CH_LAYOUT_2POINT1 }, // L, R, LFE
{ 0, 0 },
};
static const struct MovChannelLayoutMap mov_ch_layout_map_4ch[] = {
{ MOV_CH_LAYOUT_AMBISONIC_B_FORMAT, 0 }, // W, X, Y, Z
{ MOV_CH_LAYOUT_QUADRAPHONIC, AV_CH_LAYOUT_QUAD }, // L, R, Rls, Rrs
{ MOV_CH_LAYOUT_MPEG_4_0_A, AV_CH_LAYOUT_4POINT0 }, // L, R, C, Cs
{ MOV_CH_LAYOUT_MPEG_4_0_B, AV_CH_LAYOUT_4POINT0 }, // C, L, R, Cs
{ MOV_CH_LAYOUT_AC3_3_1, AV_CH_LAYOUT_4POINT0 }, // L, C, R, Cs
{ MOV_CH_LAYOUT_ITU_2_2, AV_CH_LAYOUT_2_2 }, // L, R, Ls, Rs
{ MOV_CH_LAYOUT_DVD_5, AV_CH_LAYOUT_2_1 | // L, R, LFE, Cs
AV_CH_LOW_FREQUENCY },
{ MOV_CH_LAYOUT_AC3_2_1_1, AV_CH_LAYOUT_2_1 | // L, R, Cs, LFE
AV_CH_LOW_FREQUENCY },
{ MOV_CH_LAYOUT_DVD_10, AV_CH_LAYOUT_3POINT1 }, // L, R, C, LFE
{ MOV_CH_LAYOUT_AC3_3_0_1, AV_CH_LAYOUT_3POINT1 }, // L, C, R, LFE
{ MOV_CH_LAYOUT_DTS_3_1, AV_CH_LAYOUT_3POINT1 }, // C, L, R, LFE
{ 0, 0 },
};
static const struct MovChannelLayoutMap mov_ch_layout_map_5ch[] = {
{ MOV_CH_LAYOUT_PENTAGONAL, AV_CH_LAYOUT_5POINT0_BACK }, // L, R, Rls, Rrs, C
{ MOV_CH_LAYOUT_MPEG_5_0_A, AV_CH_LAYOUT_5POINT0 }, // L, R, C, Ls, Rs
{ MOV_CH_LAYOUT_MPEG_5_0_B, AV_CH_LAYOUT_5POINT0 }, // L, R, Ls, Rs, C
{ MOV_CH_LAYOUT_MPEG_5_0_C, AV_CH_LAYOUT_5POINT0 }, // L, C, R, Ls, Rs
{ MOV_CH_LAYOUT_MPEG_5_0_D, AV_CH_LAYOUT_5POINT0 }, // C, L, R, Ls, Rs
{ MOV_CH_LAYOUT_DVD_6, AV_CH_LAYOUT_2_2 | // L, R, LFE, Ls, Rs
AV_CH_LOW_FREQUENCY },
{ MOV_CH_LAYOUT_DVD_18, AV_CH_LAYOUT_2_2 | // L, R, Ls, Rs, LFE
AV_CH_LOW_FREQUENCY },
{ MOV_CH_LAYOUT_DVD_11, AV_CH_LAYOUT_4POINT1 }, // L, R, C, LFE, Cs
{ MOV_CH_LAYOUT_AC3_3_1_1, AV_CH_LAYOUT_4POINT1 }, // L, C, R, Cs, LFE
{ MOV_CH_LAYOUT_DTS_4_1, AV_CH_LAYOUT_4POINT1 }, // C, L, R, Cs, LFE
{ 0, 0 },
};
static const struct MovChannelLayoutMap mov_ch_layout_map_6ch[] = {
{ MOV_CH_LAYOUT_HEXAGONAL, AV_CH_LAYOUT_HEXAGONAL }, // L, R, Rls, Rrs, C, Cs
{ MOV_CH_LAYOUT_DTS_6_0_C, AV_CH_LAYOUT_HEXAGONAL }, // C, Cs, L, R, Rls, Rrs
{ MOV_CH_LAYOUT_MPEG_5_1_A, AV_CH_LAYOUT_5POINT1 }, // L, R, C, LFE, Ls, Rs
{ MOV_CH_LAYOUT_MPEG_5_1_B, AV_CH_LAYOUT_5POINT1 }, // L, R, Ls, Rs, C, LFE
{ MOV_CH_LAYOUT_MPEG_5_1_C, AV_CH_LAYOUT_5POINT1 }, // L, C, R, Ls, Rs, LFE
{ MOV_CH_LAYOUT_MPEG_5_1_D, AV_CH_LAYOUT_5POINT1 }, // C, L, R, Ls, Rs, LFE
{ MOV_CH_LAYOUT_AUDIOUNIT_6_0, AV_CH_LAYOUT_6POINT0 }, // L, R, Ls, Rs, C, Cs
{ MOV_CH_LAYOUT_AAC_6_0, AV_CH_LAYOUT_6POINT0 }, // C, L, R, Ls, Rs, Cs
{ MOV_CH_LAYOUT_EAC3_6_0_A, AV_CH_LAYOUT_6POINT0 }, // L, C, R, Ls, Rs, Cs
{ MOV_CH_LAYOUT_DTS_6_0_A, AV_CH_LAYOUT_6POINT0_FRONT }, // Lc, Rc, L, R, Ls, Rs
{ MOV_CH_LAYOUT_DTS_6_0_B, AV_CH_LAYOUT_5POINT0_BACK | // C, L, R, Rls, Rrs, Ts
AV_CH_TOP_CENTER },
{ 0, 0 },
};
static const struct MovChannelLayoutMap mov_ch_layout_map_7ch[] = {
{ MOV_CH_LAYOUT_MPEG_6_1_A, AV_CH_LAYOUT_6POINT1 }, // L, R, C, LFE, Ls, Rs, Cs
{ MOV_CH_LAYOUT_AAC_6_1, AV_CH_LAYOUT_6POINT1 }, // C, L, R, Ls, Rs, Cs, LFE
{ MOV_CH_LAYOUT_EAC3_6_1_A, AV_CH_LAYOUT_6POINT1 }, // L, C, R, Ls, Rs, LFE, Cs
{ MOV_CH_LAYOUT_DTS_6_1_D, AV_CH_LAYOUT_6POINT1 }, // C, L, R, Ls, Rs, LFE, Cs
{ MOV_CH_LAYOUT_AUDIOUNIT_7_0, AV_CH_LAYOUT_7POINT0 }, // L, R, Ls, Rs, C, Rls, Rrs
{ MOV_CH_LAYOUT_AAC_7_0, AV_CH_LAYOUT_7POINT0 }, // C, L, R, Ls, Rs, Rls, Rrs
{ MOV_CH_LAYOUT_EAC3_7_0_A, AV_CH_LAYOUT_7POINT0 }, // L, C, R, Ls, Rs, Rls, Rrs
{ MOV_CH_LAYOUT_AUDIOUNIT_7_0_FRONT, AV_CH_LAYOUT_7POINT0_FRONT }, // L, R, Ls, Rs, C, Lc, Rc
{ MOV_CH_LAYOUT_DTS_7_0, AV_CH_LAYOUT_7POINT0_FRONT }, // Lc, C, Rc, L, R, Ls, Rs
{ MOV_CH_LAYOUT_EAC3_6_1_B, AV_CH_LAYOUT_5POINT1 | // L, C, R, Ls, Rs, LFE, Ts
AV_CH_TOP_CENTER },
{ MOV_CH_LAYOUT_EAC3_6_1_C, AV_CH_LAYOUT_5POINT1 | // L, C, R, Ls, Rs, LFE, Vhc
AV_CH_TOP_FRONT_CENTER },
{ MOV_CH_LAYOUT_DTS_6_1_A, AV_CH_LAYOUT_6POINT1_FRONT }, // Lc, Rc, L, R, Ls, Rs, LFE
{ MOV_CH_LAYOUT_DTS_6_1_B, AV_CH_LAYOUT_5POINT1_BACK | // C, L, R, Rls, Rrs, Ts, LFE
AV_CH_TOP_CENTER },
{ MOV_CH_LAYOUT_DTS_6_1_C, AV_CH_LAYOUT_6POINT1_BACK }, // C, Cs, L, R, Rls, Rrs, LFE
{ 0, 0 },
};
static const struct MovChannelLayoutMap mov_ch_layout_map_8ch[] = {
{ MOV_CH_LAYOUT_OCTAGONAL, AV_CH_LAYOUT_OCTAGONAL }, // L, R, Rls, Rrs, C, Cs, Ls, Rs
{ MOV_CH_LAYOUT_AAC_OCTAGONAL, AV_CH_LAYOUT_OCTAGONAL }, // C, L, R, Ls, Rs, Rls, Rrs, Cs
{ MOV_CH_LAYOUT_CUBE, AV_CH_LAYOUT_QUAD | // L, R, Rls, Rrs, Vhl, Vhr, Rlt, Rrt
AV_CH_TOP_FRONT_LEFT |
AV_CH_TOP_FRONT_RIGHT |
AV_CH_TOP_BACK_LEFT |
AV_CH_TOP_BACK_RIGHT },
{ MOV_CH_LAYOUT_MPEG_7_1_A, AV_CH_LAYOUT_7POINT1_WIDE }, // L, R, C, LFE, Ls, Rs, Lc, Rc
{ MOV_CH_LAYOUT_MPEG_7_1_B, AV_CH_LAYOUT_7POINT1_WIDE }, // C, Lc, Rc, L, R, Ls, Rs, LFE
{ MOV_CH_LAYOUT_EMAGIC_DEFAULT_7_1, AV_CH_LAYOUT_7POINT1_WIDE }, // L, R, Ls, Rs, C, LFE, Lc, Rc
{ MOV_CH_LAYOUT_EAC3_7_1_B, AV_CH_LAYOUT_7POINT1_WIDE }, // L, C, R, Ls, Rs, LFE, Lc, Rc
{ MOV_CH_LAYOUT_DTS_7_1, AV_CH_LAYOUT_7POINT1_WIDE }, // Lc, C, Rc, L, R, Ls, Rs, LFE
{ MOV_CH_LAYOUT_MPEG_7_1_C, AV_CH_LAYOUT_7POINT1 }, // L, R, C, LFE, Ls, Rs, Rls, Rrs
{ MOV_CH_LAYOUT_EAC3_7_1_A, AV_CH_LAYOUT_7POINT1 }, // L, C, R, Ls, Rs, LFE, Rls, Rrs
{ MOV_CH_LAYOUT_SMPTE_DTV, AV_CH_LAYOUT_5POINT1 | // L, R, C, LFE, Ls, Rs, Lt, Rt
AV_CH_LAYOUT_STEREO_DOWNMIX },
{ MOV_CH_LAYOUT_EAC3_7_1_C, AV_CH_LAYOUT_5POINT1 | // L, C, R, Ls, Rs, LFE, Lsd, Rsd
AV_CH_SURROUND_DIRECT_LEFT |
AV_CH_SURROUND_DIRECT_RIGHT },
{ MOV_CH_LAYOUT_EAC3_7_1_D, AV_CH_LAYOUT_5POINT1 | // L, C, R, Ls, Rs, LFE, Lw, Rw
AV_CH_WIDE_LEFT |
AV_CH_WIDE_RIGHT },
{ MOV_CH_LAYOUT_EAC3_7_1_E, AV_CH_LAYOUT_5POINT1 | // L, C, R, Ls, Rs, LFE, Vhl, Vhr
AV_CH_TOP_FRONT_LEFT |
AV_CH_TOP_FRONT_RIGHT },
{ MOV_CH_LAYOUT_EAC3_7_1_F, AV_CH_LAYOUT_5POINT1 | // L, C, R, Ls, Rs, LFE, Cs, Ts
AV_CH_BACK_CENTER |
AV_CH_TOP_CENTER },
{ MOV_CH_LAYOUT_EAC3_7_1_G, AV_CH_LAYOUT_5POINT1 | // L, C, R, Ls, Rs, LFE, Cs, Vhc
AV_CH_BACK_CENTER |
AV_CH_TOP_FRONT_CENTER },
{ MOV_CH_LAYOUT_EAC3_7_1_H, AV_CH_LAYOUT_5POINT1 | // L, C, R, Ls, Rs, LFE, Ts, Vhc
AV_CH_TOP_CENTER |
AV_CH_TOP_FRONT_CENTER },
{ MOV_CH_LAYOUT_DTS_8_0_A, AV_CH_LAYOUT_2_2 | // Lc, Rc, L, R, Ls, Rs, Rls, Rrs
AV_CH_BACK_LEFT |
AV_CH_BACK_RIGHT |
AV_CH_FRONT_LEFT_OF_CENTER |
AV_CH_FRONT_RIGHT_OF_CENTER },
{ MOV_CH_LAYOUT_DTS_8_0_B, AV_CH_LAYOUT_5POINT0 | // Lc, C, Rc, L, R, Ls, Cs, Rs
AV_CH_FRONT_LEFT_OF_CENTER |
AV_CH_FRONT_RIGHT_OF_CENTER |
AV_CH_BACK_CENTER },
{ 0, 0 },
};
static const struct MovChannelLayoutMap mov_ch_layout_map_9ch[] = {
{ MOV_CH_LAYOUT_DTS_8_1_A, AV_CH_LAYOUT_2_2 | // Lc, Rc, L, R, Ls, Rs, Rls, Rrs, LFE
AV_CH_BACK_LEFT |
AV_CH_BACK_RIGHT |
AV_CH_FRONT_LEFT_OF_CENTER |
AV_CH_FRONT_RIGHT_OF_CENTER |
AV_CH_LOW_FREQUENCY },
{ MOV_CH_LAYOUT_DTS_8_1_B, AV_CH_LAYOUT_7POINT1_WIDE | // Lc, C, Rc, L, R, Ls, Cs, Rs, LFE
AV_CH_BACK_CENTER },
{ 0, 0 },
};
static const struct MovChannelLayoutMap *mov_ch_layout_map[] = {
mov_ch_layout_map_misc,
mov_ch_layout_map_1ch,
mov_ch_layout_map_2ch,
mov_ch_layout_map_3ch,
mov_ch_layout_map_4ch,
mov_ch_layout_map_5ch,
mov_ch_layout_map_6ch,
mov_ch_layout_map_7ch,
mov_ch_layout_map_8ch,
mov_ch_layout_map_9ch,
};
static const enum MovChannelLayoutTag mov_ch_layouts_aac[] = {
MOV_CH_LAYOUT_MONO,
MOV_CH_LAYOUT_STEREO,
MOV_CH_LAYOUT_AC3_1_0_1,
MOV_CH_LAYOUT_MPEG_3_0_B,
MOV_CH_LAYOUT_ITU_2_1,
MOV_CH_LAYOUT_DVD_4,
MOV_CH_LAYOUT_QUADRAPHONIC,
MOV_CH_LAYOUT_MPEG_4_0_B,
MOV_CH_LAYOUT_ITU_2_2,
MOV_CH_LAYOUT_AC3_2_1_1,
MOV_CH_LAYOUT_DTS_3_1,
MOV_CH_LAYOUT_MPEG_5_0_D,
MOV_CH_LAYOUT_DVD_18,
MOV_CH_LAYOUT_DTS_4_1,
MOV_CH_LAYOUT_MPEG_5_1_D,
MOV_CH_LAYOUT_AAC_6_0,
MOV_CH_LAYOUT_DTS_6_0_A,
MOV_CH_LAYOUT_AAC_6_1,
MOV_CH_LAYOUT_AAC_7_0,
MOV_CH_LAYOUT_DTS_6_1_A,
MOV_CH_LAYOUT_AAC_OCTAGONAL,
MOV_CH_LAYOUT_MPEG_7_1_B,
MOV_CH_LAYOUT_DTS_8_0_A,
0,
};
static const enum MovChannelLayoutTag mov_ch_layouts_ac3[] = {
MOV_CH_LAYOUT_MONO,
MOV_CH_LAYOUT_STEREO,
MOV_CH_LAYOUT_AC3_1_0_1,
MOV_CH_LAYOUT_AC3_3_0,
MOV_CH_LAYOUT_ITU_2_1,
MOV_CH_LAYOUT_DVD_4,
MOV_CH_LAYOUT_AC3_3_1,
MOV_CH_LAYOUT_ITU_2_2,
MOV_CH_LAYOUT_AC3_2_1_1,
MOV_CH_LAYOUT_AC3_3_0_1,
MOV_CH_LAYOUT_MPEG_5_0_C,
MOV_CH_LAYOUT_DVD_18,
MOV_CH_LAYOUT_AC3_3_1_1,
MOV_CH_LAYOUT_MPEG_5_1_C,
0,
};
static const enum MovChannelLayoutTag mov_ch_layouts_alac[] = {
MOV_CH_LAYOUT_MONO,
MOV_CH_LAYOUT_STEREO,
MOV_CH_LAYOUT_MPEG_3_0_B,
MOV_CH_LAYOUT_MPEG_4_0_B,
MOV_CH_LAYOUT_MPEG_5_0_D,
MOV_CH_LAYOUT_MPEG_5_1_D,
MOV_CH_LAYOUT_AAC_6_1,
MOV_CH_LAYOUT_MPEG_7_1_B,
0,
};
static const struct {
enum CodecID codec_id;
const enum MovChannelLayoutTag *layouts;
} mov_codec_ch_layouts[] = {
{ CODEC_ID_AAC, mov_ch_layouts_aac },
{ CODEC_ID_AC3, mov_ch_layouts_ac3 },
{ CODEC_ID_ALAC, mov_ch_layouts_alac },
{ CODEC_ID_NONE, NULL },
};
uint64_t ff_mov_get_channel_layout(uint32_t tag, uint32_t bitmap)
{
int i, channels;
const struct MovChannelLayoutMap *layout_map;
/* handle the use of the channel descriptions */
/* TODO: map MOV channel labels to FFmpeg channels */
if (tag == MOV_CH_LAYOUT_USE_DESCRIPTIONS)
return 0;
/* handle the use of the channel bitmap */
if (tag == MOV_CH_LAYOUT_USE_BITMAP)
return bitmap < 0x40000 ? bitmap : 0;
/* get the layout map based on the channel count for the specified layout tag */
channels = tag & 0xFFFF;
if (channels > 9)
channels = 0;
layout_map = mov_ch_layout_map[channels];
/* find the channel layout for the specified layout tag */
for (i = 0; layout_map[i].tag != 0; i++) {
if (layout_map[i].tag == tag)
break;
}
return layout_map[i].layout;
}
uint32_t ff_mov_get_channel_layout_tag(enum CodecID codec_id,
uint64_t channel_layout,
uint32_t *bitmap)
{
int i, j;
uint32_t tag = 0;
const enum MovChannelLayoutTag *layouts = NULL;
/* find the layout list for the specified codec */
for (i = 0; mov_codec_ch_layouts[i].codec_id != CODEC_ID_NONE; i++) {
if (mov_codec_ch_layouts[i].codec_id == codec_id)
break;
}
if (mov_codec_ch_layouts[i].codec_id != CODEC_ID_NONE)
layouts = mov_codec_ch_layouts[i].layouts;
if (layouts) {
int channels;
const struct MovChannelLayoutMap *layout_map;
/* get the layout map based on the channel count */
channels = av_get_channel_layout_nb_channels(channel_layout);
if (channels > 9)
channels = 0;
layout_map = mov_ch_layout_map[channels];
/* find the layout tag for the specified channel layout */
for (i = 0; layouts[i] != 0; i++) {
if ((layouts[i] & 0xFFFF) != channels) /* mask before compare: != binds tighter than & */
continue;
for (j = 0; layout_map[j].tag != 0; j++) {
if (layout_map[j].tag == layouts[i] &&
layout_map[j].layout == channel_layout)
break;
}
if (layout_map[j].tag)
break;
}
tag = layouts[i];
}
/* if no tag was found, use channel bitmap as a backup if possible */
if (tag == 0 && channel_layout > 0 && channel_layout < 0x40000) {
tag = MOV_CH_LAYOUT_USE_BITMAP;
*bitmap = (uint32_t)channel_layout;
} else
*bitmap = 0;
/* TODO: set channel descriptions as a secondary backup */
return tag;
}
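The lookup scheme above boils down to: the low 16 bits of a layout tag give the channel count, which selects one of the per-count tables, and that table is then scanned for an exact tag match. Here is a compressed standalone sketch of the same idea; the two tiny buckets and their bitmask values are made-up illustrations, not the real tables.

#include <inttypes.h>
#include <stdio.h>

struct map { uint32_t tag; uint64_t layout; };

/* Made-up stand-ins for the per-channel-count tables above. */
static const struct map bucket_2ch[] = { { (101u << 16) | 2, 0x03 }, { 0, 0 } }; /* FL|FR              */
static const struct map bucket_6ch[] = { { (121u << 16) | 6, 0x3F }, { 0, 0 } }; /* FL|FR|FC|LFE|BL|BR */
static const struct map *buckets[7]  = {
    [2] = bucket_2ch, [6] = bucket_6ch,
};

static uint64_t lookup(uint32_t tag)
{
    unsigned channels = tag & 0xFFFF;               /* low 16 bits pick the bucket */
    const struct map *m = channels < 7 ? buckets[channels] : NULL;
    if (!m)
        return 0;
    for (; m->tag; m++)                             /* then scan for an exact tag  */
        if (m->tag == tag)
            return m->layout;
    return 0;
}

int main(void)
{
    printf("0x%" PRIx64 "\n", lookup((121u << 16) | 6)); /* prints 0x3f */
    return 0;
}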

libavformat/mov_chan.h (new file, 55 lines)
@ -0,0 +1,55 @@
/*
* Copyright (c) 2011 Justin Ruggles
*
* This file is part of Libav.
*
* Libav is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* Libav is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Libav; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* mov 'chan' tag reading/writing.
* @author Justin Ruggles
*/
#ifndef AVFORMAT_MOV_CHAN_H
#define AVFORMAT_MOV_CHAN_H
#include <stdint.h>
#include "libavcodec/avcodec.h"
/**
* Get the channel layout for the specified channel layout tag.
*
* @param[in] tag channel layout tag
* @param[out] bitmap channel bitmap (only used if needed)
* @return channel layout
*/
uint64_t ff_mov_get_channel_layout(uint32_t tag, uint32_t bitmap);
/**
* Get the channel layout tag for the specified codec id and channel layout.
* If the layout tag was not found, use a channel bitmap if possible.
*
* @param[in] codec_id codec id
* @param[in] channel_layout channel layout
* @param[out] bitmap channel bitmap
* @return channel layout tag
*/
uint32_t ff_mov_get_channel_layout_tag(enum CodecID codec_id,
uint64_t channel_layout,
uint32_t *bitmap);
#endif /* AVFORMAT_MOV_CHAN_H */
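A hedged sketch of how a caller inside libavformat might round-trip through the two declarations above. This is internal API, so the snippet only builds within the libavformat source tree; CODEC_ID_AC3 and AV_CH_LAYOUT_5POINT1 come from libavcodec/libavutil as used elsewhere in this patch, and the function is never called here, it exists only to show the calling convention.

#include <stdint.h>
#include "libavutil/audioconvert.h"
#include "mov_chan.h"

static void chan_roundtrip_sketch(void)
{
    uint32_t bitmap = 0;

    /* Muxer side: map codec id + FFmpeg channel layout to a 'chan' layout
     * tag, falling back to a channel bitmap when no tag is known. */
    uint32_t tag = ff_mov_get_channel_layout_tag(CODEC_ID_AC3,
                                                 AV_CH_LAYOUT_5POINT1,
                                                 &bitmap);

    /* Demuxer side: map tag + bitmap back to an FFmpeg channel layout. */
    uint64_t layout = ff_mov_get_channel_layout(tag, bitmap);
    (void)layout;
}

In movenc.c below, mov_write_chan_tag() performs essentially the muxer half of this, and mov_read_chan() in mov.c the demuxer half.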

@ -37,6 +37,7 @@
#include "libavutil/opt.h"
#include "libavutil/dict.h"
#include "rtpenc.h"
#include "mov_chan.h"
#undef NDEBUG
#include <assert.h>
@ -354,6 +355,31 @@ static int mov_write_ms_tag(AVIOContext *pb, MOVTrack *track)
return updateSize(pb, pos);
}
static int mov_write_chan_tag(AVIOContext *pb, MOVTrack *track)
{
uint32_t layout_tag, bitmap;
int64_t pos = avio_tell(pb);
layout_tag = ff_mov_get_channel_layout_tag(track->enc->codec_id,
track->enc->channel_layout,
&bitmap);
if (!layout_tag) {
av_log(track->enc, AV_LOG_WARNING, "not writing 'chan' tag due to "
"lack of channel information\n");
return 0;
}
avio_wb32(pb, 0); // Size
ffio_wfourcc(pb, "chan"); // Type
avio_w8(pb, 0); // Version
avio_wb24(pb, 0); // Flags
avio_wb32(pb, layout_tag); // mChannelLayoutTag
avio_wb32(pb, bitmap); // mChannelBitmap
avio_wb32(pb, 0); // mNumberChannelDescriptions
return updateSize(pb, pos);
}
static int mov_write_wave_tag(AVIOContext *pb, MOVTrack *track)
{
int64_t pos = avio_tell(pb);
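When mov_write_chan_tag() above writes no channel descriptions, the resulting box is a fixed 24 bytes: 32-bit size, the 'chan' fourcc, an 8-bit version, 24-bit flags, then the three CoreAudio fields. The standalone sketch below serializes that exact layout into a buffer; wb32() stands in for avio_wb32(), the final size is stored directly instead of the 0-then-updateSize() pattern used in the real code, and the layout tag value is a made-up example.

#include <inttypes.h>
#include <stdio.h>
#include <string.h>

/* Big-endian 32-bit store, standing in for avio_wb32(). */
static void wb32(uint8_t *p, uint32_t v)
{
    p[0] = v >> 24; p[1] = v >> 16; p[2] = v >> 8; p[3] = v;
}

int main(void)
{
    uint8_t  box[24];
    uint32_t layout_tag = (121u << 16) | 6;  /* hypothetical 6-channel tag          */

    wb32(box, sizeof(box));                  /* size: 24 bytes with no descriptions */
    memcpy(box + 4, "chan", 4);              /* type                                */
    box[8]  = 0;                             /* version                             */
    box[9]  = box[10] = box[11] = 0;         /* flags (24 bits)                     */
    wb32(box + 12, layout_tag);              /* mChannelLayoutTag                   */
    wb32(box + 16, 0);                       /* mChannelBitmap                      */
    wb32(box + 20, 0);                       /* mNumberChannelDescriptions          */

    printf("wrote %zu bytes, tag 0x%08" PRIx32 "\n", sizeof(box), layout_tag);
    return 0;
}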
@ -376,6 +402,7 @@ static int mov_write_wave_tag(AVIOContext *pb, MOVTrack *track)
} else if (track->enc->codec_id == CODEC_ID_AMR_NB) {
mov_write_amr_tag(pb, track);
} else if (track->enc->codec_id == CODEC_ID_AC3) {
mov_write_chan_tag(pb, track);
mov_write_ac3_tag(pb, track);
} else if (track->enc->codec_id == CODEC_ID_ALAC) {
mov_write_extradata_tag(pb, track);
@ -434,15 +461,9 @@ static int mov_write_audio_tag(AVIOContext *pb, MOVTrack *track)
uint32_t tag = track->tag;
if (track->mode == MODE_MOV) {
if (track->timescale > UINT16_MAX) {
if (mov_get_lpcm_flags(track->enc->codec_id))
tag = AV_RL32("lpcm");
version = 2;
} else if (track->audio_vbr || mov_pcm_le_gt16(track->enc->codec_id) ||
track->enc->codec_id == CODEC_ID_ADPCM_MS ||
track->enc->codec_id == CODEC_ID_ADPCM_IMA_WAV) {
version = 1;
}
if (mov_get_lpcm_flags(track->enc->codec_id))
tag = AV_RL32("lpcm");
version = 2;
}
avio_wb32(pb, 0); /* size */
@ -469,34 +490,18 @@ static int mov_write_audio_tag(AVIOContext *pb, MOVTrack *track)
avio_wb32(pb, av_get_bits_per_sample(track->enc->codec_id));
avio_wb32(pb, mov_get_lpcm_flags(track->enc->codec_id));
avio_wb32(pb, track->sampleSize);
avio_wb32(pb, track->enc->frame_size);
avio_wb32(pb, track->audio_vbr ? track->enc->frame_size : 1);
} else {
if (track->mode == MODE_MOV) {
avio_wb16(pb, track->enc->channels);
if (track->enc->codec_id == CODEC_ID_PCM_U8 ||
track->enc->codec_id == CODEC_ID_PCM_S8)
avio_wb16(pb, 8); /* bits per sample */
else
avio_wb16(pb, 16);
avio_wb16(pb, track->audio_vbr ? -2 : 0); /* compression ID */
} else { /* reserved for mp4/3gp */
avio_wb16(pb, 2);
avio_wb16(pb, 16);
avio_wb16(pb, 0);
}
/* reserved for mp4/3gp */
avio_wb16(pb, 2);
avio_wb16(pb, 16);
avio_wb16(pb, 0);
avio_wb16(pb, 0); /* packet size (= 0) */
avio_wb16(pb, track->timescale); /* Time scale */
avio_wb16(pb, 0); /* Reserved */
}
if(version == 1) { /* SoundDescription V1 extended info */
avio_wb32(pb, track->enc->frame_size); /* Samples per packet */
avio_wb32(pb, track->sampleSize / track->enc->channels); /* Bytes per packet */
avio_wb32(pb, track->sampleSize); /* Bytes per frame */
avio_wb32(pb, 2); /* Bytes per sample */
}
if(track->mode == MODE_MOV &&
(track->enc->codec_id == CODEC_ID_AAC ||
track->enc->codec_id == CODEC_ID_AC3 ||
@ -2226,9 +2231,6 @@ int ff_mov_write_packet(AVFormatContext *s, AVPacket *pkt)
av_log(s, AV_LOG_ERROR, "fatal error, input is not a single packet, implement a AVParser for it\n");
return -1;
}
} else if (enc->codec_id == CODEC_ID_ADPCM_MS ||
enc->codec_id == CODEC_ID_ADPCM_IMA_WAV) {
samplesInChunk = enc->frame_size;
} else if (trk->sampleSize)
samplesInChunk = size/trk->sampleSize;
else
@ -2458,21 +2460,21 @@ static int mov_write_header(AVFormatContext *s)
"or choose different container.\n");
}else if(st->codec->codec_type == AVMEDIA_TYPE_AUDIO){
track->timescale = st->codec->sample_rate;
if(!st->codec->frame_size && !av_get_bits_per_sample(st->codec->codec_id)) {
av_log(s, AV_LOG_ERROR, "track %d: codec frame size is not set\n", i);
goto error;
}else if(st->codec->codec_id == CODEC_ID_ADPCM_MS ||
st->codec->codec_id == CODEC_ID_ADPCM_IMA_WAV){
/* set sampleSize for PCM and ADPCM */
if (av_get_bits_per_sample(st->codec->codec_id)) {
if (!st->codec->block_align) {
av_log(s, AV_LOG_ERROR, "track %d: codec block align is not set for adpcm\n", i);
av_log(s, AV_LOG_ERROR, "track %d: codec block align is not set\n", i);
goto error;
}
track->sampleSize = st->codec->block_align;
}else if(st->codec->frame_size > 1){ /* assume compressed audio */
}
/* set audio_vbr for compressed audio */
if (av_get_bits_per_sample(st->codec->codec_id) < 8) {
if (!st->codec->frame_size) {
av_log(s, AV_LOG_ERROR, "track %d: codec frame size is not set\n", i);
goto error;
}
track->audio_vbr = 1;
}else{
st->codec->frame_size = 1;
track->sampleSize = (av_get_bits_per_sample(st->codec->codec_id) >> 3) * st->codec->channels;
}
if (track->mode != MODE_MOV) {
if (track->timescale > UINT16_MAX) {
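The rewritten header logic above amounts to two checks: codecs with a fixed bits-per-sample value (PCM and ADPCM) must provide block_align, which becomes the track's sampleSize; codecs whose bits-per-sample is below 8 (ADPCM, and all genuinely compressed codecs, where it is 0) must provide frame_size and are flagged as audio_vbr. Below is a standalone sketch of just that decision, with plain ints standing in for the AVCodecContext/MOVTrack fields.

#include <stdio.h>

/* Sketch of the decision above. bits_per_sample is what
 * av_get_bits_per_sample() would return for the codec id:
 *   > 0  -> PCM or ADPCM: sampleSize comes from block_align;
 *   < 8  -> ADPCM or compressed (0): needs frame_size, marked VBR. */
static int classify(int bits_per_sample, int block_align, int frame_size,
                    int *sample_size, int *audio_vbr)
{
    *sample_size = 0;
    *audio_vbr   = 0;
    if (bits_per_sample) {
        if (!block_align)
            return -1;            /* "codec block align is not set" */
        *sample_size = block_align;
    }
    if (bits_per_sample < 8) {
        if (!frame_size)
            return -1;            /* "codec frame size is not set"  */
        *audio_vbr = 1;
    }
    return 0;
}

int main(void)
{
    int ss, vbr;
    classify(16, 4, 0, &ss, &vbr);     /* 16-bit stereo PCM             */
    printf("pcm16: sampleSize=%d vbr=%d\n", ss, vbr);
    classify(0, 0, 1536, &ss, &vbr);   /* a compressed codec, e.g. AC-3 */
    printf("ac3:   sampleSize=%d vbr=%d\n", ss, vbr);
    return 0;
}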

@ -58,6 +58,10 @@
#define AV_CH_TOP_BACK_RIGHT 0x00020000
#define AV_CH_STEREO_LEFT 0x20000000 ///< Stereo downmix.
#define AV_CH_STEREO_RIGHT 0x40000000 ///< See AV_CH_STEREO_LEFT.
#define AV_CH_WIDE_LEFT 0x0000000080000000ULL
#define AV_CH_WIDE_RIGHT 0x0000000100000000ULL
#define AV_CH_SURROUND_DIRECT_LEFT 0x0000000200000000ULL
#define AV_CH_SURROUND_DIRECT_RIGHT 0x0000000400000000ULL
/** Channel mask value used for AVCodecContext.request_channel_layout
to indicate that the user requests the channel order of the decoder output
@ -71,18 +75,29 @@
* */
#define AV_CH_LAYOUT_MONO (AV_CH_FRONT_CENTER)
#define AV_CH_LAYOUT_STEREO (AV_CH_FRONT_LEFT|AV_CH_FRONT_RIGHT)
#define AV_CH_LAYOUT_2POINT1 (AV_CH_LAYOUT_STEREO|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_2_1 (AV_CH_LAYOUT_STEREO|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_SURROUND (AV_CH_LAYOUT_STEREO|AV_CH_FRONT_CENTER)
#define AV_CH_LAYOUT_3POINT1 (AV_CH_LAYOUT_SURROUND|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_4POINT0 (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_4POINT1 (AV_CH_LAYOUT_4POINT0|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_2_2 (AV_CH_LAYOUT_STEREO|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT)
#define AV_CH_LAYOUT_QUAD (AV_CH_LAYOUT_STEREO|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_5POINT0 (AV_CH_LAYOUT_SURROUND|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT)
#define AV_CH_LAYOUT_5POINT1 (AV_CH_LAYOUT_5POINT0|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_5POINT0_BACK (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_5POINT1_BACK (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_6POINT0 (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_6POINT0_FRONT (AV_CH_LAYOUT_2_2|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_HEXAGONAL (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_6POINT1 (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_6POINT1_BACK (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_6POINT1_FRONT (AV_CH_LAYOUT_6POINT0_FRONT|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_7POINT0 (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_7POINT0_FRONT (AV_CH_LAYOUT_5POINT0|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_7POINT1 (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_7POINT1_WIDE (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_7POINT1_WIDE (AV_CH_LAYOUT_5POINT1|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_OCTAGONAL (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_CENTER|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_STEREO_DOWNMIX (AV_CH_STEREO_LEFT|AV_CH_STEREO_RIGHT)
/**
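Since every AV_CH_* constant above is a distinct bit and a layout macro is the OR of its channels, the channel count of a layout is simply its population count, which is what av_get_channel_layout_nb_channels() effectively computes when mov_chan.c selects a per-count table. A minimal sketch follows; the individual channel bit values are reproduced from memory as assumptions so the snippet stands alone.

#include <inttypes.h>
#include <stdio.h>

/* A few channel bits, mirroring the corresponding AV_CH_* defines
 * (values assumed here so the example is self-contained). */
#define CH_FRONT_LEFT     0x00000001ULL
#define CH_FRONT_RIGHT    0x00000002ULL
#define CH_FRONT_CENTER   0x00000004ULL
#define CH_LOW_FREQUENCY  0x00000008ULL
#define CH_SIDE_LEFT      0x00000200ULL
#define CH_SIDE_RIGHT     0x00000400ULL

/* Population count, which is what av_get_channel_layout_nb_channels()
 * boils down to for a bitmask layout. */
static int nb_channels(uint64_t layout)
{
    int n = 0;
    for (; layout; layout >>= 1)
        n += layout & 1;
    return n;
}

int main(void)
{
    uint64_t five_point_one = CH_FRONT_LEFT | CH_FRONT_RIGHT | CH_FRONT_CENTER |
                              CH_LOW_FREQUENCY | CH_SIDE_LEFT | CH_SIDE_RIGHT;
    printf("5.1 -> 0x%" PRIx64 ", %d channels\n", five_point_one,
           nb_channels(five_point_one));
    return 0;
}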

@ -153,7 +153,7 @@
*/
#define LIBAVUTIL_VERSION_MAJOR 51
#define LIBAVUTIL_VERSION_MINOR 31
#define LIBAVUTIL_VERSION_MINOR 32
#define LIBAVUTIL_VERSION_MICRO 0
#define LIBAVUTIL_VERSION_INT AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, \

@ -1,4 +1,4 @@
aeec610f33d8eec7031c5c6a9aab4cc0 *./tests/data/acodec/pcm_s16be.mov
1059029 ./tests/data/acodec/pcm_s16be.mov
d07e475322765c20b1fcdb822ad5dc04 *./tests/data/acodec/pcm_s16be.mov
1059065 ./tests/data/acodec/pcm_s16be.mov
64151e4bcc2b717aa5a8454d424d6a1f *./tests/data/pcm_s16be.acodec.out.wav
stddev: 0.00 PSNR:999.99 MAXDIFF: 0 bytes: 1058400/ 1058400

@ -1,4 +1,4 @@
9ea74aa6d09fcfe9a1e232efecfb07fe *./tests/data/acodec/pcm_s24be.mov
1588229 ./tests/data/acodec/pcm_s24be.mov
f66d9543a4e04346818e802c4f2d7a30 *./tests/data/acodec/pcm_s24be.mov
1588265 ./tests/data/acodec/pcm_s24be.mov
64151e4bcc2b717aa5a8454d424d6a1f *./tests/data/pcm_s24be.acodec.out.wav
stddev: 0.00 PSNR:999.99 MAXDIFF: 0 bytes: 1058400/ 1058400

@ -1,4 +1,4 @@
25535c11babbc971b6a958e92feaee37 *./tests/data/acodec/pcm_s32be.mov
2117429 ./tests/data/acodec/pcm_s32be.mov
09c919947211de14b3ad0e7603e5b44e *./tests/data/acodec/pcm_s32be.mov
2117465 ./tests/data/acodec/pcm_s32be.mov
64151e4bcc2b717aa5a8454d424d6a1f *./tests/data/pcm_s32be.acodec.out.wav
stddev: 0.00 PSNR:999.99 MAXDIFF: 0 bytes: 1058400/ 1058400

@ -1,4 +1,4 @@
d63813e9255a74324ea4559d6a8a1f7c *./tests/data/acodec/pcm_s8.mov
529829 ./tests/data/acodec/pcm_s8.mov
52d8c65c4987227979785d5ac2030175 *./tests/data/acodec/pcm_s8.mov
529865 ./tests/data/acodec/pcm_s8.mov
651d4eb8d98dfcdda96ae6c43d8f156b *./tests/data/pcm_s8.acodec.out.wav
stddev: 147.89 PSNR: 52.93 MAXDIFF: 255 bytes: 1058400/ 1058400

@ -1,3 +1,3 @@
93ed3b20190daa51aeb19f62f939d04a *./tests/data/lavf/lavf.mov
357821 ./tests/data/lavf/lavf.mov
3340b7ffe1b1d98a50622bd53f786d41 *./tests/data/lavf/lavf.mov
357857 ./tests/data/lavf/lavf.mov
./tests/data/lavf/lavf.mov CRC=0x2f6a9b26