
Merge remote-tracking branch 'newdev/master'

Conflicts:
	Changelog
	doc/APIchanges
	doc/optimization.txt
	libavformat/avio.h

Merged-by: Michael Niedermayer <michaelni@gmx.at>
Committer: Michael Niedermayer
Date: 2011-03-17 16:53:58 +01:00
Commit: 0fecf2642b
174 changed files with 1109 additions and 976 deletions


@ -1,5 +1,5 @@
This file contains the names of some of the people who have contributed to
FFmpeg. The names are sorted alphabetically by last name. As this file is
Libav/FFmpeg. The names are sorted alphabetically by last name. As this file is
currently quite outdated and git serves as a much better tool for determining
authorship, it remains here for historical reasons only.


@ -79,6 +79,7 @@ version <next>:
- Bink version 'b' audio and video decoder
- Bitmap Brothers JV playback system
- Linux framebuffer input device added
- Apple HTTP Live Streaming protocol handler
version 0.6:


@ -25,7 +25,7 @@ DOXYFILE_ENCODING = UTF-8
# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
# by quotes) that should identify the project.
PROJECT_NAME = FFmpeg
PROJECT_NAME = Libav
# The PROJECT_NUMBER tag can be used to enter a project or revision number.
# This could be handy for archiving the generated documentation or


@ -2,10 +2,10 @@
1) Type './configure' to create the configuration. A list of configure
options is printed by running 'configure --help'.
'configure' can be launched from a directory different from the FFmpeg
'configure' can be launched from a directory different from the Libav
sources to build the objects out of tree. To do this, use an absolute
path when launching 'configure', e.g. '/ffmpegdir/ffmpeg/configure'.
path when launching 'configure', e.g. '/libavdir/libav/configure'.
2) Then type 'make' to build FFmpeg. GNU Make 3.81 or later is required.
2) Then type 'make' to build Libav. GNU Make 3.81 or later is required.
3) Type 'make install' to install all binaries and libraries you built.
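As a concrete sketch of the steps above (the /libavdir/libav path is the same hypothetical source location used in the example; adjust it and the build directory to your setup):

    mkdir /tmp/libav-build && cd /tmp/libav-build
    /libavdir/libav/configure --prefix=/usr/local
    make
    make install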

LICENSE

@ -1,17 +1,17 @@
FFmpeg:
Libav:
-------
Most files in FFmpeg are under the GNU Lesser General Public License version 2.1
Most files in Libav are under the GNU Lesser General Public License version 2.1
or later (LGPL v2.1+). Read the file COPYING.LGPLv2.1 for details. Some other
files have MIT/X11/BSD-style licenses. In combination the LGPL v2.1+ applies to
FFmpeg.
Libav.
Some optional parts of FFmpeg are licensed under the GNU General Public License
Some optional parts of Libav are licensed under the GNU General Public License
version 2 or later (GPL v2+). See the file COPYING.GPLv2 for details. None of
these parts are used by default, you have to explicitly pass --enable-gpl to
configure to activate them. In this case, FFmpeg's license changes to GPL v2+.
configure to activate them. In this case, Libav's license changes to GPL v2+.
Specifically, the GPL parts of FFmpeg are
Specifically, the GPL parts of Libav are
- libpostproc
- optional x86 optimizations in the files
@ -33,14 +33,14 @@ external libraries:
-------------------
Some external libraries, e.g. libx264, are under GPL and can be used in
conjunction with FFmpeg. They require --enable-gpl to be passed to configure
conjunction with Libav. They require --enable-gpl to be passed to configure
as well.
The OpenCORE external libraries are under the Apache License 2.0. That license
is incompatible with the LGPL v2.1 and the GPL v2, but not with version 3 of
those licenses. So to combine the OpenCORE libraries with FFmpeg, the license
those licenses. So to combine the OpenCORE libraries with Libav, the license
version needs to be upgraded by passing --enable-version3 to configure.
The nonfree external library libfaac can be hooked up in FFmpeg. You need to
The nonfree external library libfaac can be hooked up in Libav. You need to
pass --enable-nonfree to configure to enable it. Employ this option with care
as FFmpeg then becomes nonfree and unredistributable.
as Libav then becomes nonfree and unredistributable.
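For illustration, these are the configure switches the text above refers to, shown as separate hypothetical invocations (combine them as needed for your build):

    ./configure --enable-gpl        # GPL parts enabled, the build becomes GPL v2+
    ./configure --enable-version3   # (L)GPL v3, required to combine with Apache-2.0 libs such as OpenCORE
    ./configure --enable-nonfree    # nonfree libs such as libfaac, the result is unredistributable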


@ -177,7 +177,7 @@ distclean::
$(RM) version.h config.* libavutil/avconfig.h
config:
$(SRC_PATH)/configure $(value FFMPEG_CONFIGURATION)
$(SRC_PATH)/configure $(value LIBAV_CONFIGURATION)
# regression tests

README

@ -1,4 +1,4 @@
FFmpeg README
Libav README
-------------
1) Documentation


@ -399,7 +399,7 @@ static int warned_cfg = 0;
} \
if (flags & SHOW_CONFIG) { \
const char *cfg = libname##_configuration(); \
if (strcmp(FFMPEG_CONFIGURATION, cfg)) { \
if (strcmp(LIBAV_CONFIGURATION, cfg)) { \
if (!warned_cfg) { \
fprintf(outstream, \
"%sWARNING: library configuration mismatch\n", \
@ -425,17 +425,17 @@ static void print_all_libs_info(FILE* outstream, int flags)
void show_banner(void)
{
fprintf(stderr, "%s version " FFMPEG_VERSION ", Copyright (c) %d-%d the FFmpeg developers\n",
fprintf(stderr, "%s version " LIBAV_VERSION ", Copyright (c) %d-%d the Libav developers\n",
program_name, program_birth_year, this_year);
fprintf(stderr, " built on %s %s with %s %s\n",
__DATE__, __TIME__, CC_TYPE, CC_VERSION);
fprintf(stderr, " configuration: " FFMPEG_CONFIGURATION "\n");
fprintf(stderr, " configuration: " LIBAV_CONFIGURATION "\n");
print_all_libs_info(stderr, INDENT|SHOW_CONFIG);
print_all_libs_info(stderr, INDENT|SHOW_VERSION);
}
void show_version(void) {
printf("%s " FFMPEG_VERSION "\n", program_name);
printf("%s " LIBAV_VERSION "\n", program_name);
print_all_libs_info(stdout, SHOW_VERSION);
}

configure

@ -1,6 +1,6 @@
#!/bin/sh
#
# FFmpeg configure script
# Libav configure script
#
# Copyright (c) 2000-2002 Fabrice Bellard
# Copyright (c) 2005-2008 Diego Biurrun
@ -44,9 +44,9 @@ if test "$E1" != 0 || test "$E2" = 0; then
echo "No compatible shell script interpreter found."
echo "This configure script requires a POSIX-compatible shell"
echo "such as bash or ksh."
echo "THIS IS NOT A BUG IN FFMPEG, DO NOT REPORT IT AS SUCH."
echo "THIS IS NOT A BUG IN LIBAV, DO NOT REPORT IT AS SUCH."
echo "Instead, install a working POSIX-compatible shell."
echo "Disabling this configure test will create a broken FFmpeg."
echo "Disabling this configure test will create a broken Libav."
if test "$BASH_VERSION" = '2.04.0(1)-release'; then
echo "This bash version ($BASH_VERSION) is broken on your platform."
echo "Upgrade to a later version if available."
@ -238,7 +238,7 @@ Advanced options (experts only):
--enable-sram allow use of on-chip SRAM
--disable-symver disable symbol versioning
Developer options (useful when working on FFmpeg itself):
Developer options (useful when working on Libav itself):
--disable-debug disable debugging symbols
--enable-debug=LEVEL set the debug level [$debuglevel]
--disable-optimizations disable compiler optimizations
@ -280,7 +280,7 @@ die(){
If you think configure made a mistake, make sure you are using the latest
version from Git. If the latest version fails, report the problem to the
ffmpeg-user@mplayerhq.hu mailing list or IRC #ffmpeg on irc.freenode.net.
libav-user@libav.org mailing list or IRC #libav on irc.freenode.net.
EOF
if disabled logging; then
cat <<EOF
@ -1672,7 +1672,7 @@ for v in "$@"; do
r=${v#*=}
l=${v%"$r"}
r=$(sh_quote "$r")
FFMPEG_CONFIGURATION="${FFMPEG_CONFIGURATION# } ${l}${r}"
LIBAV_CONFIGURATION="${LIBAV_CONFIGURATION# } ${l}${r}"
done
find_things(){
@ -1784,7 +1784,7 @@ done
disabled logging && logfile=/dev/null
echo "# $0 $FFMPEG_CONFIGURATION" > $logfile
echo "# $0 $LIBAV_CONFIGURATION" > $logfile
set >> $logfile
test -n "$cross_prefix" && enable cross_compile
@ -2344,7 +2344,7 @@ case $target_os in
openbsd)
enable malloc_aligned
# On OpenBSD 4.5. the compiler does not use PIC unless
# explicitly using -fPIC. FFmpeg builds fine without PIC,
# explicitly using -fPIC. Libav builds fine without PIC,
# however the generated executable will not do anything
# (simply quits with exit-code 1, no crash, no output).
# Thus explicitly enable PIC here.
@ -2369,7 +2369,8 @@ case $target_os in
enable malloc_aligned
gas="gas-preprocessor.pl $cc"
enabled ppc && add_asflags -force_cpusubtype_ALL
SHFLAGS='-dynamiclib -Wl,-single_module -Wl,-install_name,$(SHLIBDIR)/$(SLIBNAME),-current_version,$(LIBVERSION),-compatibility_version,$(LIBMAJOR) -Wl,-read_only_relocs,suppress'
SHFLAGS='-dynamiclib -Wl,-single_module -Wl,-install_name,$(SHLIBDIR)/$(SLIBNAME),-current_version,$(LIBVERSION),-compatibility_version,$(LIBMAJOR)'
enabled x86_32 && append SHFLAGS -Wl,-read_only_relocs,suppress
strip="${strip} -x"
add_ldflags -Wl,-dynamic,-search_paths_first
SLIBSUF=".dylib"
@ -2478,7 +2479,7 @@ case $target_os in
;;
esac
echo "config:$arch:$subarch:$cpu:$target_os:$cc_ident:$FFMPEG_CONFIGURATION" >config.fate
echo "config:$arch:$subarch:$cpu:$target_os:$cc_ident:$LIBAV_CONFIGURATION" >config.fate
check_cpp_condition stdlib.h "defined(__PIC__) || defined(__pic__) || defined(PIC)" && enable pic
@ -3201,9 +3202,9 @@ config_files="$TMPH config.mak"
cat > config.mak <<EOF
# Automatically generated by configure - do not modify!
ifndef FFMPEG_CONFIG_MAK
FFMPEG_CONFIG_MAK=1
FFMPEG_CONFIGURATION=$FFMPEG_CONFIGURATION
ifndef LIBAV_CONFIG_MAK
LIBAV_CONFIG_MAK=1
LIBAV_CONFIGURATION=$LIBAV_CONFIGURATION
prefix=$prefix
LIBDIR=\$(DESTDIR)$libdir
SHLIBDIR=\$(DESTDIR)$shlibdir
@ -3292,10 +3293,10 @@ get_version LIBAVFILTER libavfilter/avfilter.h
cat > $TMPH <<EOF
/* Automatically generated by configure - do not modify! */
#ifndef FFMPEG_CONFIG_H
#define FFMPEG_CONFIG_H
#define FFMPEG_CONFIGURATION "$(c_escape $FFMPEG_CONFIGURATION)"
#define FFMPEG_LICENSE "$(c_escape $license)"
#ifndef LIBAV_CONFIG_H
#define LIBAV_CONFIG_H
#define LIBAV_CONFIGURATION "$(c_escape $LIBAV_CONFIGURATION)"
#define LIBAV_LICENSE "$(c_escape $license)"
#define FFMPEG_DATADIR "$(eval c_escape $datadir)"
#define CC_TYPE "$cc_type"
#define CC_VERSION $cc_version
@ -3346,8 +3347,8 @@ LAVFI_TESTS=$(print_enabled -n _test $LAVFI_TESTS)
SEEK_TESTS=$(print_enabled -n _test $SEEK_TESTS)
EOF
echo "#endif /* FFMPEG_CONFIG_H */" >> $TMPH
echo "endif # FFMPEG_CONFIG_MAK" >> config.mak
echo "#endif /* LIBAV_CONFIG_H */" >> $TMPH
echo "endif # LIBAV_CONFIG_MAK" >> config.mak
# Do not overwrite an unchanged config.h to avoid superfluous rebuilds.
cp_if_changed $TMPH config.h
@ -3411,10 +3412,10 @@ Cflags: -I\${includedir}
EOF
}
pkgconfig_generate libavutil "FFmpeg utility library" "$LIBAVUTIL_VERSION"
pkgconfig_generate libavcodec "FFmpeg codec library" "$LIBAVCODEC_VERSION" "$extralibs"
pkgconfig_generate libavformat "FFmpeg container format library" "$LIBAVFORMAT_VERSION" "$extralibs" "libavcodec = $LIBAVCODEC_VERSION"
pkgconfig_generate libavdevice "FFmpeg device handling library" "$LIBAVDEVICE_VERSION" "$extralibs" "libavformat = $LIBAVFORMAT_VERSION"
pkgconfig_generate libavfilter "FFmpeg video filtering library" "$LIBAVFILTER_VERSION" "$extralibs"
pkgconfig_generate libpostproc "FFmpeg post processing library" "$LIBPOSTPROC_VERSION"
pkgconfig_generate libswscale "FFmpeg image rescaling library" "$LIBSWSCALE_VERSION" "" "libavutil = $LIBAVUTIL_VERSION"
pkgconfig_generate libavutil "Libav utility library" "$LIBAVUTIL_VERSION"
pkgconfig_generate libavcodec "Libav codec library" "$LIBAVCODEC_VERSION" "$extralibs"
pkgconfig_generate libavformat "Libav container format library" "$LIBAVFORMAT_VERSION" "$extralibs" "libavcodec = $LIBAVCODEC_VERSION"
pkgconfig_generate libavdevice "Libav device handling library" "$LIBAVDEVICE_VERSION" "$extralibs" "libavformat = $LIBAVFORMAT_VERSION"
pkgconfig_generate libavfilter "Libav video filtering library" "$LIBAVFILTER_VERSION" "$extralibs"
pkgconfig_generate libpostproc "Libav post processing library" "$LIBPOSTPROC_VERSION"
pkgconfig_generate libswscale "Libav image rescaling library" "$LIBSWSCALE_VERSION" "" "libavutil = $LIBAVUTIL_VERSION"


@ -15,6 +15,11 @@ API changes, most recent first:
2011-XX-XX - XXXXXXX - lavu XX.XXX.X - pixfmt.h
Add PIX_FMT_BGR48LE and PIX_FMT_BGR48BE pixel formats
2011-03-02 - 863c471 - lavf 52.103.0 - av_pkt_dump2, av_pkt_dump_log2
Add new functions av_pkt_dump2, av_pkt_dump_log2 that uses the
source stream timebase for outputting timestamps. Deprecate
av_pkt_dump and av_pkt_dump_log.
2011-02-20 - e731b8d - lavf 52.102.0 - avio.h
* e731b8d - rename init_put_byte() to ffio_init_context(), deprecating the
original, and move it to a private header so it is no longer


@ -1,7 +1,7 @@
@chapter Bitstream Filters
@c man begin BITSTREAM FILTERS
When you configure your FFmpeg build, all the supported bitstream
When you configure your Libav build, all the supported bitstream
filters are enabled by default. You can list all available ones using
the configure option @code{--list-bsfs}.


@ -1,4 +1,4 @@
FFmpeg currently uses a custom build system, this text attempts to document
Libav currently uses a custom build system, this text attempts to document
some of its obscure features and options.
Options to make:


@ -1,10 +1,10 @@
@chapter Demuxers
@c man begin DEMUXERS
Demuxers are configured elements in FFmpeg which allow to read the
Demuxers are configured elements in Libav which allow to read the
multimedia streams from a particular type of file.
When you configure your FFmpeg build, all the supported demuxers
When you configure your Libav build, all the supported demuxers
are enabled by default. You can list all available ones using the
configure option "--list-demuxers".


@ -32,12 +32,12 @@ generated by ./configure to understand what is needed.
You can use libavcodec or libavformat in your commercial program, but
@emph{any patch you make must be published}. The best way to proceed is
to send your patches to the FFmpeg mailing list.
to send your patches to the Libav mailing list.
@anchor{Coding Rules}
@section Coding Rules
FFmpeg is programmed in the ISO C90 language with a few additional
Libav is programmed in the ISO C90 language with a few additional
features from ISO C99, namely:
@itemize @bullet
@item
@ -54,7 +54,7 @@ These features are supported by all compilers we care about, so we will not
accept patches to remove their use unless they absolutely do not impair
clarity and performance.
All code must compile with GCC 2.95 and GCC 3.3. Currently, FFmpeg also
All code must compile with GCC 2.95 and GCC 3.3. Currently, Libav also
compiles with several other compilers, such as the Compaq ccc compiler
or Sun Studio 9, and we would like to keep it that way unless it would
be exceedingly involved. To ensure compatibility, please do not use any
@ -76,7 +76,7 @@ The TAB character is forbidden outside of Makefiles as is any
form of trailing whitespace. Commits containing either will be
rejected by the Subversion repository.
The main priority in FFmpeg is simplicity and small code size in order to
The main priority in Libav is simplicity and small code size in order to
minimize the bug count.
Comments: Use the JavaDoc/Doxygen
@ -129,7 +129,7 @@ should also be avoided if they don't make the code easier to understand.
an "or any later version" clause is also acceptable, but LGPL is
preferred.
@item
You must not commit code which breaks FFmpeg! (Meaning unfinished but
You must not commit code which breaks Libav! (Meaning unfinished but
enabled code which breaks compilation or compiles but does not work or
breaks the regression tests)
You can commit unfinished stuff (for testing etc), but it must be disabled
@ -168,7 +168,7 @@ should also be avoided if they don't make the code easier to understand.
with functional changes, such commits will be rejected and removed. Every
developer has his own indentation style, you should not change it. Of course
if you (re)write something, you can use your own style, even though we would
prefer if the indentation throughout FFmpeg was consistent (Many projects
prefer if the indentation throughout Libav was consistent (Many projects
force a given indentation style - we do not.). If you really need to make
indentation changes (try to avoid this), separate them strictly from real
changes.
@ -253,7 +253,7 @@ keeping it as a logical unit that contains an individual change, even
if it spans multiple files. This makes reviewing your patches much easier
for us and greatly increases your chances of getting your patch applied.
Use the patcheck tool of FFmpeg to check your patch.
Use the patcheck tool of Libav to check your patch.
The tool is located in the tools directory.
Run the regression tests before submitting a patch so that you can
@ -275,7 +275,7 @@ Your patch will be reviewed on the mailing list. You will likely be asked
to make some changes and are expected to send in an improved version that
incorporates the requests from the review. This process may go through
several iterations. Once your patch is deemed good enough, some developer
will pick it up and commit it to the official FFmpeg tree.
will pick it up and commit it to the official Libav tree.
Give us a few days to react. But if some time passes without reaction,
send a reminder by email. Your patch should eventually be dealt with.
@ -325,7 +325,7 @@ send a reminder by email. Your patch should eventually be dealt with.
@item
Is the patch a unified diff?
@item
Is the patch against latest FFmpeg git master branch?
Is the patch against latest Libav git master branch?
@item
Are you subscribed to ffmpeg-dev?
(the list is subscribers only due to spam)
@ -374,7 +374,7 @@ send a reminder by email. Your patch should eventually be dealt with.
patch easily?
@item
If you added a new file, did you insert a license header? It should be
taken from FFmpeg, not randomly copied and pasted from somewhere else.
taken from Libav, not randomly copied and pasted from somewhere else.
@item
You should maintain alphabetical order in alphabetically ordered lists as
long as doing so does not break API/ABI compatibility.


@ -1,10 +1,10 @@
@chapter Encoders
@c man begin ENCODERS
Encoders are configured elements in FFmpeg which allow the encoding of
Encoders are configured elements in Libav which allow the encoding of
multimedia streams.
When you configure your FFmpeg build, all the supported native encoders
When you configure your Libav build, all the supported native encoders
are enabled by default. Encoders requiring an external library must be enabled
manually via the corresponding @code{--enable-lib} option. You can list all
available encoders using the configure option @code{--list-encoders}.
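For instance, one might list the available encoders and then enable an external one; libx264 is used here purely as an illustration and, being under GPL, also needs @code{--enable-gpl}:

@example
./configure --list-encoders
./configure --enable-libx264 --enable-gpl
@end example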


@ -1,7 +1,7 @@
@chapter Expression Evaluation
@c man begin EXPRESSION EVALUATION
When evaluating an arithmetic expression, FFmpeg uses an internal
When evaluating an arithmetic expression, Libav uses an internal
formula evaluator, implemented through the @file{libavutil/eval.h}
interface.


@ -1,8 +1,8 @@
\input texinfo @c -*- texinfo -*-
@settitle FFmpeg FAQ
@settitle Libav FAQ
@titlepage
@center @titlefont{FFmpeg FAQ}
@center @titlefont{Libav FAQ}
@end titlepage
@top
@ -11,33 +11,33 @@
@chapter General Questions
@section When will the next FFmpeg version be released? / Why are FFmpeg releases so few and far between?
@section When will the next Libav version be released? / Why are Libav releases so few and far between?
Like most open source projects FFmpeg suffers from a certain lack of
Like most open source projects Libav suffers from a certain lack of
manpower. For this reason the developers have to prioritize the work
they do and putting out releases is not at the top of the list, fixing
bugs and reviewing patches takes precedence. Please don't complain or
request more timely and/or frequent releases unless you are willing to
help out creating them.
@section I have a problem with an old version of FFmpeg; where should I report it?
Nowhere. We do not support old FFmpeg versions in any way, we simply lack
@section I have a problem with an old version of Libav; where should I report it?
Nowhere. We do not support old Libav versions in any way, we simply lack
the time, motivation and manpower to do so. If you have a problem with an
old version of FFmpeg, upgrade to the latest Subversion snapshot. If you
old version of Libav, upgrade to the latest Subversion snapshot. If you
still experience the problem, then you can report it according to the
guidelines in @url{http://ffmpeg.org/bugreports.html}.
@section Why doesn't FFmpeg support feature [xyz]?
@section Why doesn't Libav support feature [xyz]?
Because no one has taken on that task yet. FFmpeg development is
Because no one has taken on that task yet. Libav development is
driven by the tasks that are important to the individual developers.
If there is a feature that is important to you, the best way to get
it implemented is to undertake the task yourself or sponsor a developer.
@section FFmpeg does not support codec XXX. Can you include a Windows DLL loader to support it?
@section Libav does not support codec XXX. Can you include a Windows DLL loader to support it?
No. Windows DLLs are not portable, bloated and often slow.
Moreover FFmpeg strives to support all codecs natively.
Moreover Libav strives to support all codecs natively.
A DLL loader is not conducive to that goal.
@section My bug report/mail to ffmpeg-devel/user has not received any replies.
@ -55,10 +55,10 @@ libav* from another application.
@item You speak about a video having problems on playback but
not what you use to play it.
@item We have no faint clue what you are talking about besides
that it is related to FFmpeg.
that it is related to Libav.
@end itemize
@section Is there a forum for FFmpeg? I do not like mailing lists.
@section Is there a forum for Libav? I do not like mailing lists.
You may view our mailing lists with a more forum-alike look here:
@url{http://dir.gmane.org/gmane.comp.video.ffmpeg.user},
@ -272,7 +272,7 @@ material, and try '-top 0/1' if the result looks really messed-up.
@section How can I read DirectShow files?
If you have built FFmpeg with @code{./configure --enable-avisynth}
If you have built Libav with @code{./configure --enable-avisynth}
(only possible on MinGW/Cygwin platforms),
then you may use any file that DirectShow can read as input.
@ -368,11 +368,11 @@ examining all of the vbv_delay values and making complicated computations."
@chapter Development
@section Are there examples illustrating how to use the FFmpeg libraries, particularly libavcodec and libavformat?
@section Are there examples illustrating how to use the Libav libraries, particularly libavcodec and libavformat?
Yes. Read the Developers Guide of the FFmpeg documentation. Alternatively,
Yes. Read the Developers Guide of the Libav documentation. Alternatively,
examine the source code for one of the many open source projects that
already incorporate FFmpeg at (@url{projects.html}).
already incorporate Libav at (@url{projects.html}).
@section Can you support my C compiler XXX?
@ -383,14 +383,14 @@ with @code{#ifdef}s related to the compiler.
@section Is Microsoft Visual C++ supported?
No. Microsoft Visual C++ is not compliant to the C99 standard and does
not - among other things - support the inline assembly used in FFmpeg.
not - among other things - support the inline assembly used in Libav.
If you wish to use MSVC++ for your
project then you can link the MSVC++ code with libav* as long as
you compile the latter with a working C compiler. For more information, see
the @emph{Microsoft Visual C++ compatibility} section in the FFmpeg
the @emph{Microsoft Visual C++ compatibility} section in the Libav
documentation.
There have been efforts to make FFmpeg compatible with MSVC++ in the
There have been efforts to make Libav compatible with MSVC++ in the
past. However, they have all been rejected as too intrusive, especially
since MinGW does the job adequately. None of the core developers
work with MSVC++ and thus this item is low priority. Should you find
@ -398,13 +398,13 @@ the silver bullet that solves this problem, feel free to shoot it at us.
We strongly recommend you to move over from MSVC++ to MinGW tools.
@section Can I use FFmpeg or libavcodec under Windows?
@section Can I use Libav or libavcodec under Windows?
Yes, but the Cygwin or MinGW tools @emph{must} be used to compile FFmpeg.
Read the @emph{Windows} section in the FFmpeg documentation to find more
Yes, but the Cygwin or MinGW tools @emph{must} be used to compile Libav.
Read the @emph{Windows} section in the Libav documentation to find more
information.
To get help and instructions for building FFmpeg under Windows, check out
To get help and instructions for building Libav under Windows, check out
the FFmpeg Windows Help Forum at
@url{http://ffmpeg.arrozcru.org/}.
@ -414,7 +414,7 @@ No. These tools are too bloated and they complicate the build.
@section Why not rewrite ffmpeg in object-oriented C++?
FFmpeg is already organized in a highly modular manner and does not need to
Libav is already organized in a highly modular manner and does not need to
be rewritten in a formal object language. Further, many of the developers
favor straight C; it works for them. For more arguments on this matter,
read "Programming Religion" at (@url{http://www.tux.org/lkml/#s15}).
@ -441,16 +441,16 @@ the compilation failure then you are probably not qualified for this.
@section I'm using libavcodec from within my C++ application but the linker complains about missing symbols which seem to be available.
FFmpeg is a pure C project, so to use the libraries within your C++ application
Libav is a pure C project, so to use the libraries within your C++ application
you need to explicitly state that you are using a C library. You can do this by
encompassing your FFmpeg includes using @code{extern "C"}.
encompassing your Libav includes using @code{extern "C"}.
See @url{http://www.parashift.com/c++-faq-lite/mixing-c-and-cpp.html#faq-32.3}
@section I have a file in memory / a API different from *open/*read/ libc how do I use it with libavformat?
You have to implement a URLProtocol, see @file{libavformat/file.c} in
FFmpeg and @file{libmpdemux/demux_lavf.c} in MPlayer sources.
Libav and @file{libmpdemux/demux_lavf.c} in MPlayer sources.
@section I get "No compatible shell script interpreter found." in MSys.


@ -735,7 +735,7 @@ A preset file contains a sequence of @var{option}=@var{value} pairs,
one for each line, specifying a sequence of options which would be
awkward to specify on the command line. Lines starting with the hash
('#') character are ignored and are used to provide comments. Check
the @file{ffpresets} directory in the FFmpeg source tree for examples.
the @file{ffpresets} directory in the Libav source tree for examples.
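A minimal hypothetical preset file in the format described above might look like this (the option names are ordinary codec options chosen only for illustration):

@example
# illustrative video preset
g=250
bf=3
qmin=10
qmax=51
@end example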
Preset files are specified with the @code{vpre}, @code{apre},
@code{spre}, and @code{fpre} options. The @code{fpre} option takes the
@ -997,11 +997,11 @@ file to which you want to add them.
@settitle FFmpeg video converter
@c man begin SEEALSO
ffplay(1), ffprobe(1), ffserver(1) and the FFmpeg HTML documentation
ffplay(1), ffprobe(1), ffserver(1) and the Libav HTML documentation
@c man end
@c man begin AUTHORS
The FFmpeg developers
The Libav developers
@c man end
@end ignore


@ -20,9 +20,9 @@ ffplay [options] @file{input_file}
@chapter Description
@c man begin DESCRIPTION
FFplay is a very simple and portable media player using the FFmpeg
FFplay is a very simple and portable media player using the Libav
libraries and the SDL library. It is mostly used as a testbed for the
various FFmpeg APIs.
various Libav APIs.
@c man end
@chapter Options
@ -169,11 +169,11 @@ Seek to percentage in file corresponding to fraction of width.
@settitle FFplay media player
@c man begin SEEALSO
ffmpeg(1), ffprobe(1), ffserver(1) and the FFmpeg HTML documentation
ffmpeg(1), ffprobe(1), ffserver(1) and the Libav HTML documentation
@c man end
@c man begin AUTHORS
The FFmpeg developers
The Libav developers
@c man end
@end ignore


@ -122,11 +122,11 @@ with name "STREAM".
@settitle FFprobe media prober
@c man begin SEEALSO
ffmpeg(1), ffplay(1), ffserver(1) and the FFmpeg HTML documentation
ffmpeg(1), ffplay(1), ffserver(1) and the Libav HTML documentation
@c man end
@c man begin AUTHORS
The FFmpeg developers
The Libav developers
@c man end
@end ignore


@ -266,11 +266,11 @@ rather than as a daemon.
@c man begin SEEALSO
ffmpeg(1), ffplay(1), ffprobe(1), the @file{ffmpeg/doc/ffserver.conf}
example and the FFmpeg HTML documentation
example and the Libav HTML documentation
@c man end
@c man begin AUTHORS
The FFmpeg developers
The Libav developers
@c man end
@end ignore


@ -88,6 +88,6 @@ can be disabled setting the environment variable
@env{FFMPEG_FORCE_NOCOLOR} or @env{NO_COLOR}, or can be forced setting
the environment variable @env{FFMPEG_FORCE_COLOR}.
The use of the environment variable @env{NO_COLOR} is deprecated and
will be dropped in a following FFmpeg version.
will be dropped in a following Libav version.
@end table


@ -92,7 +92,7 @@ Follows a BNF description for the filtergraph syntax:
@chapter Audio Filters
@c man begin AUDIO FILTERS
When you configure your FFmpeg build, you can disable any of the
When you configure your Libav build, you can disable any of the
existing filters using --disable-filters.
The configure output will show the audio filters included in your
build.
@ -155,7 +155,7 @@ tools.
@chapter Video Filters
@c man begin VIDEO FILTERS
When you configure your FFmpeg build, you can disable any of the
When you configure your Libav build, you can disable any of the
existing filters using --disable-filters.
The configure output will show the video filters included in your
build.
@ -531,7 +531,7 @@ format=yuv420p:yuv444p:yuv410p
Apply a frei0r effect to the input video.
To enable compilation of this filter you need to install the frei0r
header and configure FFmpeg with --enable-frei0r.
header and configure Libav with --enable-frei0r.
The filter supports the syntax:
@example
@ -669,7 +669,7 @@ Pass the video source unchanged to the output.
Apply video transform using libopencv.
To enable this filter install libopencv library and headers and
configure FFmpeg with --enable-libopencv.
configure Libav with --enable-libopencv.
The filter takes the parameters: @var{filter_name}@{:=@}@var{filter_params}.
@ -1314,7 +1314,7 @@ timebase. The expression can contain the constants "PI", "E", "PHI",
Provide a frei0r source.
To enable compilation of this filter you need to install the frei0r
header and configure FFmpeg with --enable-frei0r.
header and configure Libav with --enable-frei0r.
The source supports the syntax:
@example


@ -11,13 +11,13 @@
@chapter external libraries
FFmpeg can be hooked up with a number of external libraries to add support
Libav can be hooked up with a number of external libraries to add support
for more formats. None of them are used by default, their use has to be
explicitly requested by passing the appropriate flags to @file{./configure}.
@section OpenCORE AMR
FFmpeg can make use of the OpenCORE libraries for AMR-NB
Libav can make use of the OpenCORE libraries for AMR-NB
decoding/encoding and AMR-WB decoding.
Go to @url{http://sourceforge.net/projects/opencore-amr/} and follow the instructions for
@ -27,7 +27,7 @@ installing the libraries. Then pass @code{--enable-libopencore-amrnb} and/or
Note that OpenCORE is under the Apache License 2.0 (see
@url{http://www.apache.org/licenses/LICENSE-2.0} for details), which is
incompatible with the LGPL version 2.1 and GPL version 2. You have to
upgrade FFmpeg's license to LGPL version 3 (or if you have enabled
upgrade Libav's license to LGPL version 3 (or if you have enabled
GPL components, GPL version 3) to use it.
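A hypothetical configure invocation enabling both OpenCORE AMR libraries together with the license upgrade described above:

@example
./configure --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-version3
@end example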
@ -37,7 +37,7 @@ You can use the @code{-formats} and @code{-codecs} options to have an exhaustive
@section File Formats
FFmpeg supports the following file formats through the @code{libavformat}
Libav supports the following file formats through the @code{libavformat}
library:
@multitable @columnfractions .4 .1 .1 .4
@ -751,18 +751,18 @@ Using a cross-compiler is preferred for various reasons.
@subsection DJGPP
FFmpeg cannot be compiled because of broken system headers, add
Libav cannot be compiled because of broken system headers, add
@code{--extra-cflags=-U__STRICT_ANSI__} to the configure options as a
workaround.
@section OS/2
For information about compiling FFmpeg on OS/2 see
For information about compiling Libav on OS/2 see
@url{http://www.edm2.com/index.php/FFmpeg}.
@section Unix-like
Some parts of FFmpeg cannot be built with version 2.15 of the GNU
Some parts of Libav cannot be built with version 2.15 of the GNU
assembler which is still provided by a few AMD64 distributions. To
make sure your compiler really uses the required version of gas
after a binutils upgrade, run:
@ -777,7 +777,7 @@ to configure.
@subsection BSD
BSD make will not build FFmpeg, you need to install and use GNU Make
BSD make will not build Libav, you need to install and use GNU Make
(@file{gmake}).
@subsubsection FreeBSD
@ -790,7 +790,7 @@ getting the system headers fixed.
@subsection (Open)Solaris
GNU Make is required to build FFmpeg, so you have to invoke (@file{gmake}),
GNU Make is required to build Libav, so you have to invoke (@file{gmake}),
standard Solaris Make will not work. When building with a non-c99 front-end
(gcc, generic suncc) add either @code{--extra-libs=/usr/lib/values-xpg6.o}
or @code{--extra-libs=/usr/lib/64/values-xpg6.o} to the configure options
@ -808,22 +808,22 @@ bash ./configure
MacOS X on PowerPC or ARM (iPhone) requires a preprocessor from
@url{http://github.com/yuvi/gas-preprocessor} to build the optimized
assembler functions. Just download the Perl script and put it somewhere
in your PATH, FFmpeg's configure will pick it up automatically.
in your PATH, Libav's configure will pick it up automatically.
@section Windows
To get help and instructions for building FFmpeg under Windows, check out
To get help and instructions for building Libav under Windows, check out
the FFmpeg Windows Help Forum at
@url{http://ffmpeg.arrozcru.org/}.
@subsection Native Windows compilation
FFmpeg can be built to run natively on Windows using the MinGW tools. Install
Libav can be built to run natively on Windows using the MinGW tools. Install
the latest versions of MSYS and MinGW from @url{http://www.mingw.org/}.
You can find detailed installation
instructions in the download section and the FAQ.
FFmpeg does not build out-of-the-box with the packages the automated MinGW
Libav does not build out-of-the-box with the packages the automated MinGW
installer provides. It also requires coreutils to be installed and many other
packages updated to the latest version. The minimum version for some packages
are listed below:
@ -835,7 +835,7 @@ are listed below:
@item mingw-runtime 3.15
@end itemize
FFmpeg automatically passes @code{-fno-common} to the compiler to work around
Libav automatically passes @code{-fno-common} to the compiler to work around
a GCC bug (see @url{http://gcc.gnu.org/bugzilla/show_bug.cgi?id=37216}).
Within the MSYS shell, configure and make with:
@ -866,14 +866,14 @@ Edit the @file{bin/sdl-config} script so that it points to the correct prefix
where SDL was installed. Verify that @file{sdl-config} can be launched from
the MSYS command line.
@item By using @code{./configure --enable-shared} when configuring FFmpeg,
@item By using @code{./configure --enable-shared} when configuring Libav,
you can build libavutil, libavcodec and libavformat as DLLs.
@end itemize
@subsection Microsoft Visual C++ compatibility
As stated in the FAQ, FFmpeg will not compile under MSVC++. However, if you
As stated in the FAQ, Libav will not compile under MSVC++. However, if you
want to use the libav* libraries in your own applications, you can still
compile those applications using MSVC++. But the libav* libraries you link
to @emph{must} be built with MinGW. However, you will not be able to debug
@ -881,13 +881,13 @@ inside the libav* libraries, since MSVC++ does not recognize the debug
symbols generated by GCC.
We strongly recommend you to move over from MSVC++ to MinGW tools.
This description of how to use the FFmpeg libraries with MSVC++ is based on
This description of how to use the Libav libraries with MSVC++ is based on
Microsoft Visual C++ 2005 Express Edition. If you have a different version,
you might have to modify the procedures slightly.
@subsubsection Using static libraries
Assuming you have just built and installed FFmpeg in @file{/usr/local}.
Assuming you have just built and installed Libav in @file{/usr/local}.
@enumerate
@ -898,13 +898,13 @@ Application Wizard, uncheck the "Precompiled headers" option.
@item Write the source code for your application, or, for testing, just
copy the code from an existing sample application into the source file
that MSVC++ has already created for you. For example, you can copy
@file{libavformat/output-example.c} from the FFmpeg distribution.
@file{libavformat/output-example.c} from the Libav distribution.
@item Open the "Project / Properties" dialog box. In the "Configuration"
combo box, select "All Configurations" so that the changes you make will
affect both debug and release builds. In the tree view on the left hand
side, select "C/C++ / General", then edit the "Additional Include
Directories" setting to contain the path where the FFmpeg includes were
Directories" setting to contain the path where the Libav includes were
installed (i.e. @file{c:\msys\1.0\local\include}).
Do not add MinGW's include directory here, or the include files will
conflict with MSVC's.
@ -912,7 +912,7 @@ conflict with MSVC's.
@item Still in the "Project / Properties" dialog box, select
"Linker / General" from the tree view and edit the
"Additional Library Directories" setting to contain the @file{lib}
directory where FFmpeg was installed (i.e. @file{c:\msys\1.0\local\lib}),
directory where Libav was installed (i.e. @file{c:\msys\1.0\local\lib}),
the directory where MinGW libs are installed (i.e. @file{c:\mingw\lib}),
and the directory where MinGW's GCC libs are installed
(i.e. @file{C:\mingw\lib\gcc\mingw32\4.2.1-sjlj}). Then select
@ -929,13 +929,13 @@ set to "Multi-threaded DLL".
@item Click "OK" to close the "Project / Properties" dialog box.
@item MSVC++ lacks some C99 header files that are fundamental for FFmpeg.
@item MSVC++ lacks some C99 header files that are fundamental for Libav.
Get msinttypes from @url{http://code.google.com/p/msinttypes/downloads/list}
and install it in MSVC++'s include directory
(i.e. @file{C:\Program Files\Microsoft Visual Studio 8\VC\include}).
@item MSVC++ also does not understand the @code{inline} keyword used by
FFmpeg, so you must add this line before @code{#include}ing libav*:
Libav, so you must add this line before @code{#include}ing libav*:
@example
#define inline _inline
@end example
@ -968,10 +968,10 @@ and run @file{c:\msys\1.0\msys.bat} from there.
@item Within the MSYS shell, run @code{lib.exe}. If you get a help message
from @file{Microsoft (R) Library Manager}, this means your environment
variables are set up correctly, the @file{Microsoft (R) Library Manager}
is on the path and will be used by FFmpeg to create
is on the path and will be used by Libav to create
MSVC++-compatible import libraries.
@item Build FFmpeg with
@item Build Libav with
@example
./configure --enable-shared --enable-memalign-hack
@ -1000,7 +1000,7 @@ of DLL files, but the ones that are actually used to run your application
are the ones with a major version number in their filenames
(i.e. @file{avcodec-51.dll}).
FFmpeg headers do not declare global data for Windows DLLs through the usual
Libav headers do not declare global data for Windows DLLs through the usual
dllexport/dllimport interface. Such data will be exported properly while
building, but to use them in your MSVC++ code you will have to edit the
appropriate headers and mark the data as dllimport. For example, in
@ -1014,14 +1014,14 @@ extern __declspec(dllimport) const AVPixFmtDescriptor av_pix_fmt_descriptors[];
You must use the MinGW cross compilation tools available at
@url{http://www.mingw.org/}.
Then configure FFmpeg with the following options:
Then configure Libav with the following options:
@example
./configure --target-os=mingw32 --cross-prefix=i386-mingw32msvc-
@end example
(you can change the cross-prefix according to the prefix chosen for the
MinGW tools).
Then you can easily test FFmpeg with Wine
Then you can easily test Libav with Wine
(@url{http://www.winehq.com/}).
@subsection Compilation under Cygwin
@ -1055,7 +1055,7 @@ shared libraries:
./configure --enable-shared --disable-static --extra-cflags=-fno-reorder-functions
@end example
If you want to build FFmpeg with additional libraries, download Cygwin
If you want to build Libav with additional libraries, download Cygwin
"Devel" packages for Ogg and Vorbis from any Cygwin packages repository:
@example
libogg-devel, libvorbis-devel


@ -28,9 +28,9 @@ Consult these resources whenever you have problems, they are quite exhaustive.
You do not need a special username or password.
All you need is to provide a ssh public key to the Git server admin.
What follows now is a basic introduction to Git and some FFmpeg-specific
What follows now is a basic introduction to Git and some Libav-specific
guidelines. Read it at least once, if you are granted commit privileges to the
FFmpeg project you are expected to be familiar with these rules.
Libav project you are expected to be familiar with these rules.
@ -46,11 +46,11 @@ I. BASICS:
git clone git://git.videolan.org/ffmpeg <target>
This will put the FFmpeg sources into the directory <target>.
This will put the Libav sources into the directory <target>.
git clone git@git.videolan.org:ffmpeg <target>
This will put the FFmpeg sources into the directory <target> and let
This will put the Libav sources into the directory <target> and let
you push back your changes to the remote repository.
@ -72,7 +72,7 @@ I. BASICS:
fetches the changes from the main repository and replays your local commits
over it. This is required to keep all your local changes at the top of
FFmpeg's master tree. The master tree will reject pushes with merge commits.
Libav's master tree. The master tree will reject pushes with merge commits.
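The update described above is a rebasing pull; assuming the main repository is the remote named 'origin', it would typically be run as

    git pull --rebase origin master

before pushing.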
3. Adding/removing files/directories:


@ -1,10 +1,10 @@
@chapter Input Devices
@c man begin INPUT DEVICES
Input devices are configured elements in FFmpeg which allow to access
Input devices are configured elements in Libav which allow to access
the data coming from a multimedia device attached to your system.
When you configure your FFmpeg build, all the supported input devices
When you configure your Libav build, all the supported input devices
are enabled by default. You can list all available ones using the
configure option "--list-indevs".
@ -95,7 +95,7 @@ A JACK input device creates one or more JACK writable clients, one for
each audio channel, with name @var{client_name}:input_@var{N}, where
@var{client_name} is the name provided by the application, and @var{N}
is a number which identifies the channel.
Each writable client will send the acquired data to the FFmpeg input
Each writable client will send the acquired data to the Libav input
device.
Once you have created one or more JACK readable clients, you need to


@ -1,11 +1,11 @@
FFmpeg's bug/patch/feature request tracker manual
=================================================
Libav's bug/patch/feature request tracker manual
================================================
NOTE: This is a draft.
Overview:
---------
FFmpeg uses Roundup for tracking issues, new issues and changes to
Libav uses Roundup for tracking issues, new issues and changes to
existing issues can be done through a web interface and through email.
It is possible to subscribe to individual issues by adding yourself to the
nosy list or to subscribe to the ffmpeg-issues mailing list which receives
@ -60,7 +60,7 @@ critical
No feature request can be critical.
important
Bugs which make FFmpeg unusable for a significant number of users, and
Bugs which make Libav unusable for a significant number of users, and
patches fixing them.
Examples here might be completely broken MPEG-4 decoding or a build issue
on Linux.
@ -81,7 +81,7 @@ minor
wish
Something that is desirable to have but that there is no urgency at
all to implement, e.g. something completely cosmetic like a website
restyle or a personalized doxy template or the FFmpeg logo.
restyle or a personalized doxy template or the Libav logo.
This priority is not valid for bugs.


@ -11,10 +11,10 @@
@chapter Introduction
Libavfilter is the filtering API of FFmpeg. It is the substitute of the
Libavfilter is the filtering API of Libav. It is the substitute of the
now deprecated 'vhooks' and started as a Google Summer of Code project.
Integrating libavfilter into the main FFmpeg repository is a work in
Integrating libavfilter into the main Libav repository is a work in
progress. If you wish to try the unfinished development code of
libavfilter then check it out from the libavfilter repository into
some directory of your choice by:
@ -74,7 +74,7 @@ not have video output.
@chapter graph2dot
The @file{graph2dot} program included in the FFmpeg @file{tools}
The @file{graph2dot} program included in the Libav @file{tools}
directory can be used to parse a filter graph description and issue a
corresponding textual representation in the dot language.


@ -1,7 +1,7 @@
@chapter Metadata
@c man begin METADATA
FFmpeg is able to dump metadata from media files into a simple UTF-8-encoded
Libav is able to dump metadata from media files into a simple UTF-8-encoded
INI-like text file and then load it back using the metadata muxer/demuxer.
The file format is as follows:
@ -53,7 +53,7 @@ A ffmetadata file might look like this:
;FFMETADATA1
title=bike\\shed
;this is a comment
artist=FFmpeg troll team
artist=Libav troll team
[CHAPTER]
TIMEBASE=1/1000


@ -1,7 +1,7 @@
FFmpeg multithreading methods
Libav multithreading methods
==============================================
FFmpeg provides two methods for multithreading codecs.
Libav provides two methods for multithreading codecs.
Slice threading decodes multiple parts of a frame at the same time, using
AVCodecContext execute() and execute2().


@ -1,10 +1,10 @@
@chapter Muxers
@c man begin MUXERS
Muxers are configured elements in FFmpeg which allow writing
Muxers are configured elements in Libav which allow writing
multimedia streams to a particular type of file.
When you configure your FFmpeg build, all the supported muxers
When you configure your Libav build, all the supported muxers
are enabled by default. You can list all available muxers using the
configure option @code{--list-muxers}.


@ -201,7 +201,7 @@ Inline asm vs. external asm
---------------------------
Both inline asm (__asm__("..") in a .c file, handled by a compiler such as gcc)
and external asm (.s or .asm files, handled by an assembler such as yasm/nasm)
are accepted in FFmpeg. Which one to use differs per specific case.
are accepted in Libav. Which one to use differs per specific case.
- if your code is intended to be inlined in a C function, inline asm is always
better, because external asm cannot be inlined


@ -1,10 +1,10 @@
@chapter Output Devices
@c man begin OUTPUT DEVICES
Output devices are configured elements in FFmpeg which allow to write
Output devices are configured elements in Libav which allow to write
multimedia data to an output device attached to your system.
When you configure your FFmpeg build, all the supported output devices
When you configure your Libav build, all the supported output devices
are enabled by default. You can list all available ones using the
configure option "--list-outdevs".


@ -1,10 +1,10 @@
@chapter Protocols
@c man begin PROTOCOLS
Protocols are configured elements in FFmpeg which allow to access
Protocols are configured elements in Libav which allow to access
resources which require the use of a particular protocol.
When you configure your FFmpeg build, all the supported protocols are
When you configure your Libav build, all the supported protocols are
enabled by default. You can list all available ones using the
configure option "--list-protocols".
@ -25,8 +25,9 @@ Read Apple HTTP Live Streaming compliant segmented stream as
a uniform one. The M3U8 playlists describing the segments can be
remote HTTP resources or local files, accessed using the standard
file protocol.
HTTP is default, specific protocol can be declared using the "+"
specifier.
HTTP is default, specific protocol can be declared by specifying
"+@var{proto}" after the applehttp URI scheme name, where @var{proto}
is either "file" or "http".
@example
applehttp://host/path/to/remote/resource.m3u8
@ -34,7 +35,6 @@ applehttp+http://host/path/to/remote/resource.m3u8
applehttp+file://path/to/local/resource.m3u8
@end example
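As a usage sketch, such a playlist could be opened directly with any of the tools built on libavformat, e.g. (host and path are placeholders):

@example
ffplay applehttp+http://host/path/to/remote/resource.m3u8
@end example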
@section concat
Physical concatenation protocol.


@ -8,9 +8,9 @@ it's a little late for this year's soc (2006).
The Goal:
Our goal in respect to soc is and must be of course exactly one thing and
that is to improve FFmpeg, to reach this goal, code must
that is to improve Libav, to reach this goal, code must
* conform to the development policy and patch submission guidelines
* must improve FFmpeg somehow (faster, smaller, "better",
* must improve Libav somehow (faster, smaller, "better",
more codecs supported, fewer bugs, cleaner, ...)
for mentors and other developers to help students to reach that goal it is
@ -20,5 +20,5 @@ easy reviewable that again leads us to:
* separation of cosmetic from non-cosmetic changes (this is almost entirely
ignored by mentors and students in soc 2006 which might lead to a surprise
when the code will be reviewed at the end before a possible inclusion in
FFmpeg, individual changes were generally not reviewable due to cosmetics).
Libav, individual changes were generally not reviewable due to cosmetics).
* frequent commits, so that comments can be provided early


@ -1861,7 +1861,7 @@ static void fmt_bytecount(AVIOContext *pb, int64_t count)
for (s = suffix; count >= 100000 && s[1]; count /= 1000, s++);
url_fprintf(pb, "%"PRId64"%c", count, *s);
avio_printf(pb, "%"PRId64"%c", count, *s);
}
static void compute_status(HTTPContext *c)
@ -1880,20 +1880,20 @@ static void compute_status(HTTPContext *c)
return;
}
url_fprintf(pb, "HTTP/1.0 200 OK\r\n");
url_fprintf(pb, "Content-type: %s\r\n", "text/html");
url_fprintf(pb, "Pragma: no-cache\r\n");
url_fprintf(pb, "\r\n");
avio_printf(pb, "HTTP/1.0 200 OK\r\n");
avio_printf(pb, "Content-type: %s\r\n", "text/html");
avio_printf(pb, "Pragma: no-cache\r\n");
avio_printf(pb, "\r\n");
url_fprintf(pb, "<html><head><title>%s Status</title>\n", program_name);
avio_printf(pb, "<html><head><title>%s Status</title>\n", program_name);
if (c->stream->feed_filename[0])
url_fprintf(pb, "<link rel=\"shortcut icon\" href=\"%s\">\n", c->stream->feed_filename);
url_fprintf(pb, "</head>\n<body>");
url_fprintf(pb, "<h1>%s Status</h1>\n", program_name);
avio_printf(pb, "<link rel=\"shortcut icon\" href=\"%s\">\n", c->stream->feed_filename);
avio_printf(pb, "</head>\n<body>");
avio_printf(pb, "<h1>%s Status</h1>\n", program_name);
/* format status */
url_fprintf(pb, "<h2>Available Streams</h2>\n");
url_fprintf(pb, "<table cellspacing=0 cellpadding=4>\n");
url_fprintf(pb, "<tr><th valign=top>Path<th align=left>Served<br>Conns<th><br>bytes<th valign=top>Format<th>Bit rate<br>kbits/s<th align=left>Video<br>kbits/s<th><br>Codec<th align=left>Audio<br>kbits/s<th><br>Codec<th align=left valign=top>Feed\n");
avio_printf(pb, "<h2>Available Streams</h2>\n");
avio_printf(pb, "<table cellspacing=0 cellpadding=4>\n");
avio_printf(pb, "<tr><th valign=top>Path<th align=left>Served<br>Conns<th><br>bytes<th valign=top>Format<th>Bit rate<br>kbits/s<th align=left>Video<br>kbits/s<th><br>Codec<th align=left>Audio<br>kbits/s<th><br>Codec<th align=left valign=top>Feed\n");
stream = first_stream;
while (stream != NULL) {
char sfilename[1024];
@ -1921,9 +1921,9 @@ static void compute_status(HTTPContext *c)
}
}
url_fprintf(pb, "<tr><td><a href=\"/%s\">%s</a> ",
avio_printf(pb, "<tr><td><a href=\"/%s\">%s</a> ",
sfilename, stream->filename);
url_fprintf(pb, "<td align=right> %d <td align=right> ",
avio_printf(pb, "<td align=right> %d <td align=right> ",
stream->conns_served);
fmt_bytecount(pb, stream->bytes_served);
switch(stream->stream_type) {
@ -1962,33 +1962,33 @@ static void compute_status(HTTPContext *c)
abort();
}
}
url_fprintf(pb, "<td align=center> %s <td align=right> %d <td align=right> %d <td> %s %s <td align=right> %d <td> %s %s",
avio_printf(pb, "<td align=center> %s <td align=right> %d <td align=right> %d <td> %s %s <td align=right> %d <td> %s %s",
stream->fmt->name,
stream->bandwidth,
video_bit_rate / 1000, video_codec_name, video_codec_name_extra,
audio_bit_rate / 1000, audio_codec_name, audio_codec_name_extra);
if (stream->feed)
url_fprintf(pb, "<td>%s", stream->feed->filename);
avio_printf(pb, "<td>%s", stream->feed->filename);
else
url_fprintf(pb, "<td>%s", stream->feed_filename);
url_fprintf(pb, "\n");
avio_printf(pb, "<td>%s", stream->feed_filename);
avio_printf(pb, "\n");
}
break;
default:
url_fprintf(pb, "<td align=center> - <td align=right> - <td align=right> - <td><td align=right> - <td>\n");
avio_printf(pb, "<td align=center> - <td align=right> - <td align=right> - <td><td align=right> - <td>\n");
break;
}
}
stream = stream->next;
}
url_fprintf(pb, "</table>\n");
avio_printf(pb, "</table>\n");
stream = first_stream;
while (stream != NULL) {
if (stream->feed == stream) {
url_fprintf(pb, "<h2>Feed %s</h2>", stream->filename);
avio_printf(pb, "<h2>Feed %s</h2>", stream->filename);
if (stream->pid) {
url_fprintf(pb, "Running as pid %d.\n", stream->pid);
avio_printf(pb, "Running as pid %d.\n", stream->pid);
#if defined(linux) && !defined(CONFIG_NOCUTILS)
{
@ -2007,7 +2007,7 @@ static void compute_status(HTTPContext *c)
if (fscanf(pid_stat, "%10s %64s", cpuperc,
cpuused) == 2) {
url_fprintf(pb, "Currently using %s%% of the cpu. Total time used %s.\n",
avio_printf(pb, "Currently using %s%% of the cpu. Total time used %s.\n",
cpuperc, cpuused);
}
fclose(pid_stat);
@ -2015,9 +2015,9 @@ static void compute_status(HTTPContext *c)
}
#endif
url_fprintf(pb, "<p>");
avio_printf(pb, "<p>");
}
url_fprintf(pb, "<table cellspacing=0 cellpadding=4><tr><th>Stream<th>type<th>kbits/s<th align=left>codec<th align=left>Parameters\n");
avio_printf(pb, "<table cellspacing=0 cellpadding=4><tr><th>Stream<th>type<th>kbits/s<th align=left>codec<th align=left>Parameters\n");
for (i = 0; i < stream->nb_streams; i++) {
AVStream *st = stream->streams[i];
@ -2040,26 +2040,26 @@ static void compute_status(HTTPContext *c)
default:
abort();
}
url_fprintf(pb, "<tr><td align=right>%d<td>%s<td align=right>%d<td>%s<td>%s\n",
avio_printf(pb, "<tr><td align=right>%d<td>%s<td align=right>%d<td>%s<td>%s\n",
i, type, st->codec->bit_rate/1000, codec ? codec->name : "", parameters);
}
url_fprintf(pb, "</table>\n");
avio_printf(pb, "</table>\n");
}
stream = stream->next;
}
/* connection status */
url_fprintf(pb, "<h2>Connection Status</h2>\n");
avio_printf(pb, "<h2>Connection Status</h2>\n");
url_fprintf(pb, "Number of connections: %d / %d<br>\n",
avio_printf(pb, "Number of connections: %d / %d<br>\n",
nb_connections, nb_max_connections);
url_fprintf(pb, "Bandwidth in use: %"PRIu64"k / %"PRIu64"k<br>\n",
avio_printf(pb, "Bandwidth in use: %"PRIu64"k / %"PRIu64"k<br>\n",
current_bandwidth, max_bandwidth);
url_fprintf(pb, "<table>\n");
url_fprintf(pb, "<tr><th>#<th>File<th>IP<th>Proto<th>State<th>Target bits/sec<th>Actual bits/sec<th>Bytes transferred\n");
avio_printf(pb, "<table>\n");
avio_printf(pb, "<tr><th>#<th>File<th>IP<th>Proto<th>State<th>Target bits/sec<th>Actual bits/sec<th>Bytes transferred\n");
c1 = first_http_ctx;
i = 0;
while (c1 != NULL) {
@ -2078,7 +2078,7 @@ static void compute_status(HTTPContext *c)
i++;
p = inet_ntoa(c1->from_addr.sin_addr);
url_fprintf(pb, "<tr><td><b>%d</b><td>%s%s<td>%s<td>%s<td>%s<td align=right>",
avio_printf(pb, "<tr><td><b>%d</b><td>%s%s<td>%s<td>%s<td>%s<td align=right>",
i,
c1->stream ? c1->stream->filename : "",
c1->state == HTTPSTATE_RECEIVE_DATA ? "(input)" : "",
@ -2086,20 +2086,20 @@ static void compute_status(HTTPContext *c)
c1->protocol,
http_state[c1->state]);
fmt_bytecount(pb, bitrate);
url_fprintf(pb, "<td align=right>");
avio_printf(pb, "<td align=right>");
fmt_bytecount(pb, compute_datarate(&c1->datarate, c1->data_count) * 8);
url_fprintf(pb, "<td align=right>");
avio_printf(pb, "<td align=right>");
fmt_bytecount(pb, c1->data_count);
url_fprintf(pb, "\n");
avio_printf(pb, "\n");
c1 = c1->next;
}
url_fprintf(pb, "</table>\n");
avio_printf(pb, "</table>\n");
/* date */
ti = time(NULL);
p = ctime(&ti);
url_fprintf(pb, "<hr size=1 noshade>Generated at %s", p);
url_fprintf(pb, "</body>\n</html>\n");
avio_printf(pb, "<hr size=1 noshade>Generated at %s", p);
avio_printf(pb, "</body>\n</html>\n");
len = url_close_dyn_buf(pb, &c->pb_buffer);
c->buffer_ptr = c->pb_buffer;
@ -2721,7 +2721,8 @@ static int http_receive_data(HTTPContext *c)
if (!fmt_in)
goto fail;
url_open_buf(&pb, c->buffer, c->buffer_end - c->buffer, URL_RDONLY);
pb = avio_alloc_context(c->buffer, c->buffer_end - c->buffer,
0, NULL, NULL, NULL, NULL);
pb->is_streamed = 1;
if (av_open_input_stream(&s, pb, c->stream->feed_filename, fmt_in, NULL) < 0) {
@ -2812,20 +2813,20 @@ static void rtsp_reply_header(HTTPContext *c, enum RTSPStatusCode error_number)
break;
}
url_fprintf(c->pb, "RTSP/1.0 %d %s\r\n", error_number, str);
url_fprintf(c->pb, "CSeq: %d\r\n", c->seq);
avio_printf(c->pb, "RTSP/1.0 %d %s\r\n", error_number, str);
avio_printf(c->pb, "CSeq: %d\r\n", c->seq);
/* output GMT time */
ti = time(NULL);
tm = gmtime(&ti);
strftime(buf2, sizeof(buf2), "%a, %d %b %Y %H:%M:%S", tm);
url_fprintf(c->pb, "Date: %s GMT\r\n", buf2);
avio_printf(c->pb, "Date: %s GMT\r\n", buf2);
}
static void rtsp_reply_error(HTTPContext *c, enum RTSPStatusCode error_number)
{
rtsp_reply_header(c, error_number);
url_fprintf(c->pb, "\r\n");
avio_printf(c->pb, "\r\n");
}
static int rtsp_parse_request(HTTPContext *c)
@ -2970,10 +2971,10 @@ static int prepare_sdp_description(FFStream *stream, uint8_t **pbuffer,
static void rtsp_cmd_options(HTTPContext *c, const char *url)
{
// rtsp_reply_header(c, RTSP_STATUS_OK);
url_fprintf(c->pb, "RTSP/1.0 %d %s\r\n", RTSP_STATUS_OK, "OK");
url_fprintf(c->pb, "CSeq: %d\r\n", c->seq);
url_fprintf(c->pb, "Public: %s\r\n", "OPTIONS, DESCRIBE, SETUP, TEARDOWN, PLAY, PAUSE");
url_fprintf(c->pb, "\r\n");
avio_printf(c->pb, "RTSP/1.0 %d %s\r\n", RTSP_STATUS_OK, "OK");
avio_printf(c->pb, "CSeq: %d\r\n", c->seq);
avio_printf(c->pb, "Public: %s\r\n", "OPTIONS, DESCRIBE, SETUP, TEARDOWN, PLAY, PAUSE");
avio_printf(c->pb, "\r\n");
}
static void rtsp_cmd_describe(HTTPContext *c, const char *url)
@ -3014,10 +3015,10 @@ static void rtsp_cmd_describe(HTTPContext *c, const char *url)
return;
}
rtsp_reply_header(c, RTSP_STATUS_OK);
url_fprintf(c->pb, "Content-Base: %s/\r\n", url);
url_fprintf(c->pb, "Content-Type: application/sdp\r\n");
url_fprintf(c->pb, "Content-Length: %d\r\n", content_length);
url_fprintf(c->pb, "\r\n");
avio_printf(c->pb, "Content-Base: %s/\r\n", url);
avio_printf(c->pb, "Content-Type: application/sdp\r\n");
avio_printf(c->pb, "Content-Length: %d\r\n", content_length);
avio_printf(c->pb, "\r\n");
avio_write(c->pb, content, content_length);
av_free(content);
}
@ -3163,30 +3164,30 @@ static void rtsp_cmd_setup(HTTPContext *c, const char *url,
/* now everything is OK, so we can send the connection parameters */
rtsp_reply_header(c, RTSP_STATUS_OK);
/* session ID */
url_fprintf(c->pb, "Session: %s\r\n", rtp_c->session_id);
avio_printf(c->pb, "Session: %s\r\n", rtp_c->session_id);
switch(rtp_c->rtp_protocol) {
case RTSP_LOWER_TRANSPORT_UDP:
rtp_port = rtp_get_local_rtp_port(rtp_c->rtp_handles[stream_index]);
rtcp_port = rtp_get_local_rtcp_port(rtp_c->rtp_handles[stream_index]);
url_fprintf(c->pb, "Transport: RTP/AVP/UDP;unicast;"
avio_printf(c->pb, "Transport: RTP/AVP/UDP;unicast;"
"client_port=%d-%d;server_port=%d-%d",
th->client_port_min, th->client_port_max,
rtp_port, rtcp_port);
break;
case RTSP_LOWER_TRANSPORT_TCP:
url_fprintf(c->pb, "Transport: RTP/AVP/TCP;interleaved=%d-%d",
avio_printf(c->pb, "Transport: RTP/AVP/TCP;interleaved=%d-%d",
stream_index * 2, stream_index * 2 + 1);
break;
default:
break;
}
if (setup.transport_option[0] != '\0')
url_fprintf(c->pb, ";%s", setup.transport_option);
url_fprintf(c->pb, "\r\n");
avio_printf(c->pb, ";%s", setup.transport_option);
avio_printf(c->pb, "\r\n");
url_fprintf(c->pb, "\r\n");
avio_printf(c->pb, "\r\n");
}
@ -3248,8 +3249,8 @@ static void rtsp_cmd_play(HTTPContext *c, const char *url, RTSPMessageHeader *h)
/* now everything is OK, so we can send the connection parameters */
rtsp_reply_header(c, RTSP_STATUS_OK);
/* session ID */
url_fprintf(c->pb, "Session: %s\r\n", rtp_c->session_id);
url_fprintf(c->pb, "\r\n");
avio_printf(c->pb, "Session: %s\r\n", rtp_c->session_id);
avio_printf(c->pb, "\r\n");
}
static void rtsp_cmd_pause(HTTPContext *c, const char *url, RTSPMessageHeader *h)
@ -3273,8 +3274,8 @@ static void rtsp_cmd_pause(HTTPContext *c, const char *url, RTSPMessageHeader *h
/* now everything is OK, so we can send the connection parameters */
rtsp_reply_header(c, RTSP_STATUS_OK);
/* session ID */
url_fprintf(c->pb, "Session: %s\r\n", rtp_c->session_id);
url_fprintf(c->pb, "\r\n");
avio_printf(c->pb, "Session: %s\r\n", rtp_c->session_id);
avio_printf(c->pb, "\r\n");
}
static void rtsp_cmd_teardown(HTTPContext *c, const char *url, RTSPMessageHeader *h)
@ -3296,8 +3297,8 @@ static void rtsp_cmd_teardown(HTTPContext *c, const char *url, RTSPMessageHeader
/* now everything is OK, so we can send the connection parameters */
rtsp_reply_header(c, RTSP_STATUS_OK);
/* session ID */
url_fprintf(c->pb, "Session: %s\r\n", session_id);
url_fprintf(c->pb, "\r\n");
avio_printf(c->pb, "Session: %s\r\n", session_id);
avio_printf(c->pb, "\r\n");
}
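Note: the ffserver.c hunks above are a mechanical rename — every url_fprintf() call becomes avio_printf() with identical arguments. A minimal sketch of a caller after the rename (the helper name and status values are illustrative, not taken from the patch):

#include "libavformat/avio.h"

/* Illustrative helper: emit an RTSP-style status line and CSeq header
 * through the renamed printf-like writer. */
static void write_ok_reply(AVIOContext *pb, int seq)
{
    avio_printf(pb, "RTSP/1.0 %d %s\r\n", 200, "OK");
    avio_printf(pb, "CSeq: %d\r\n", seq);
    avio_printf(pb, "\r\n");
}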

View File

@ -146,34 +146,34 @@ static av_always_inline float quantize_and_encode_band_cost_template(
curidx *= range;
curidx += quants[j] + off;
}
curbits = ff_aac_spectral_bits[cb-1][curidx];
vec = &ff_aac_codebook_vectors[cb-1][curidx*dim];
if (BT_UNSIGNED) {
for (k = 0; k < dim; k++) {
float t = fabsf(in[i+k]);
float di;
if (BT_ESC && vec[k] == 64.0f) { //FIXME: slow
if (t >= CLIPPED_ESCAPE) {
di = t - CLIPPED_ESCAPE;
curbits += 21;
} else {
int c = av_clip(quant(t, Q), 0, 8191);
di = t - c*cbrtf(c)*IQ;
curbits += av_log2(c)*2 - 4 + 1;
}
curbits = ff_aac_spectral_bits[cb-1][curidx];
vec = &ff_aac_codebook_vectors[cb-1][curidx*dim];
if (BT_UNSIGNED) {
for (k = 0; k < dim; k++) {
float t = fabsf(in[i+k]);
float di;
if (BT_ESC && vec[k] == 64.0f) { //FIXME: slow
if (t >= CLIPPED_ESCAPE) {
di = t - CLIPPED_ESCAPE;
curbits += 21;
} else {
di = t - vec[k]*IQ;
int c = av_clip(quant(t, Q), 0, 8191);
di = t - c*cbrtf(c)*IQ;
curbits += av_log2(c)*2 - 4 + 1;
}
if (vec[k] != 0.0f)
curbits++;
rd += di*di;
}
} else {
for (k = 0; k < dim; k++) {
float di = in[i+k] - vec[k]*IQ;
rd += di*di;
} else {
di = t - vec[k]*IQ;
}
if (vec[k] != 0.0f)
curbits++;
rd += di*di;
}
} else {
for (k = 0; k < dim; k++) {
float di = in[i+k] - vec[k]*IQ;
rd += di*di;
}
}
cost += rd * lambda + curbits;
resbits += curbits;
if (cost >= uplim)
@ -575,7 +575,7 @@ static void search_for_quantizers_anmr(AVCodecContext *avctx, AACEncContext *s,
int qnrg = av_clip_uint8(log2f(sqrtf(qnrgf/qcnt))*4 - 31 + SCALE_ONE_POS - SCALE_DIV_512);
q1 = qnrg + 30;
q0 = qnrg - 30;
//av_log(NULL, AV_LOG_ERROR, "q0 %d, q1 %d\n", q0, q1);
//av_log(NULL, AV_LOG_ERROR, "q0 %d, q1 %d\n", q0, q1);
if (q0 < q0low) {
q1 += q0low - q0;
q0 = q0low;
@ -723,7 +723,7 @@ static void search_for_quantizers_twoloop(AVCodecContext *avctx,
sce->zeroes[w*16+g] = !nz;
if (nz)
minthr = FFMIN(minthr, uplim);
allz = FFMAX(allz, nz);
allz |= nz;
}
}
for (w = 0; w < sce->ics.num_windows; w += sce->ics.group_len[w]) {

View File

@ -39,8 +39,8 @@
* constants for 3GPP AAC psychoacoustic model
* @{
*/
#define PSY_3GPP_SPREAD_HI 1.5f // spreading factor for ascending threshold spreading (15 dB/Bark)
#define PSY_3GPP_SPREAD_LOW 3.0f // spreading factor for descending threshold spreading (30 dB/Bark)
#define PSY_3GPP_THR_SPREAD_HI 1.5f // spreading factor for low-to-hi threshold spreading (15 dB/Bark)
#define PSY_3GPP_THR_SPREAD_LOW 3.0f // spreading factor for hi-to-low threshold spreading (30 dB/Bark)
#define PSY_3GPP_RPEMIN 0.01f
#define PSY_3GPP_RPELEV 2.0f
@ -61,9 +61,7 @@
*/
typedef struct AacPsyBand{
float energy; ///< band energy
float ffac; ///< form factor
float thr; ///< energy threshold
float min_snr; ///< minimal SNR
float thr_quiet; ///< threshold in quiet
}AacPsyBand;
@ -88,17 +86,18 @@ typedef struct AacPsyChannel{
* psychoacoustic model frame type-dependent coefficients
*/
typedef struct AacPsyCoeffs{
float ath [64]; ///< absolute threshold of hearing per bands
float barks [64]; ///< Bark value for each spectral band in long frame
float spread_low[64]; ///< spreading factor for low-to-high threshold spreading in long frame
float spread_hi [64]; ///< spreading factor for high-to-low threshold spreading in long frame
float ath; ///< absolute threshold of hearing per bands
float barks; ///< Bark value for each spectral band in long frame
float spread_low[2]; ///< spreading factor for low-to-high threshold spreading in long frame
float spread_hi [2]; ///< spreading factor for high-to-low threshold spreading in long frame
float min_snr; ///< minimal SNR
}AacPsyCoeffs;
/**
* 3GPP TS26.403-inspired psychoacoustic model specific data
*/
typedef struct AacPsyContext{
AacPsyCoeffs psy_coef[2];
AacPsyCoeffs psy_coef[2][64];
AacPsyChannel *ch;
}AacPsyContext;
@ -243,27 +242,30 @@ static av_cold int psy_3gpp_init(FFPsyContext *ctx) {
minath = ath(3410, ATH_ADD);
for (j = 0; j < 2; j++) {
AacPsyCoeffs *coeffs = &pctx->psy_coef[j];
AacPsyCoeffs *coeffs = pctx->psy_coef[j];
const uint8_t *band_sizes = ctx->bands[j];
float line_to_frequency = ctx->avctx->sample_rate / (j ? 256.f : 2048.0f);
i = 0;
prev = 0.0;
for (g = 0; g < ctx->num_bands[j]; g++) {
i += ctx->bands[j][g];
i += band_sizes[g];
bark = calc_bark((i-1) * line_to_frequency);
coeffs->barks[g] = (bark + prev) / 2.0;
coeffs[g].barks = (bark + prev) / 2.0;
prev = bark;
}
for (g = 0; g < ctx->num_bands[j] - 1; g++) {
coeffs->spread_low[g] = pow(10.0, -(coeffs->barks[g+1] - coeffs->barks[g]) * PSY_3GPP_SPREAD_LOW);
coeffs->spread_hi [g] = pow(10.0, -(coeffs->barks[g+1] - coeffs->barks[g]) * PSY_3GPP_SPREAD_HI);
AacPsyCoeffs *coeff = &coeffs[g];
float bark_width = coeffs[g+1].barks - coeffs->barks;
coeff->spread_low[0] = pow(10.0, -bark_width * PSY_3GPP_THR_SPREAD_LOW);
coeff->spread_hi [0] = pow(10.0, -bark_width * PSY_3GPP_THR_SPREAD_HI);
}
start = 0;
for (g = 0; g < ctx->num_bands[j]; g++) {
minscale = ath(start * line_to_frequency, ATH_ADD);
for (i = 1; i < ctx->bands[j][g]; i++)
for (i = 1; i < band_sizes[g]; i++)
minscale = FFMIN(minscale, ath((start + i) * line_to_frequency, ATH_ADD));
coeffs->ath[g] = minscale - minath;
start += ctx->bands[j][g];
coeffs[g].ath = minscale - minath;
start += band_sizes[g];
}
}
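Note: a quick numeric check of the renamed spreading constants (not part of the patch): with PSY_3GPP_THR_SPREAD_HI = 1.5 and a band width of one Bark, the factor comes out as 10^-1.5 ≈ 0.0316, i.e. roughly -15 dB per Bark, which matches the "15 dB/Bark" comment on the #define.

#include <math.h>
#include <stdio.h>

int main(void)
{
    double bark_width = 1.0;                           /* one Bark */
    double spread_hi  = pow(10.0, -bark_width * 1.5);  /* PSY_3GPP_THR_SPREAD_HI */
    printf("factor %.4f -> %.1f dB/Bark\n", spread_hi, 10.0 * log10(spread_hi));
    return 0;
}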
@ -393,9 +395,9 @@ static void psy_3gpp_analyze(FFPsyContext *ctx, int channel,
AacPsyChannel *pch = &pctx->ch[channel];
int start = 0;
int i, w, g;
const int num_bands = ctx->num_bands[wi->num_windows == 8];
const uint8_t* band_sizes = ctx->bands[wi->num_windows == 8];
AacPsyCoeffs *coeffs = &pctx->psy_coef[wi->num_windows == 8];
const int num_bands = ctx->num_bands[wi->num_windows == 8];
const uint8_t *band_sizes = ctx->bands[wi->num_windows == 8];
AacPsyCoeffs *coeffs = &pctx->psy_coef[wi->num_windows == 8];
//calculate energies, initial thresholds and related values - 5.4.2 "Threshold Calculation"
for (w = 0; w < wi->num_windows*16; w += 16) {
@ -406,26 +408,37 @@ static void psy_3gpp_analyze(FFPsyContext *ctx, int channel,
band->energy += coefs[start+i] * coefs[start+i];
band->thr = band->energy * 0.001258925f;
start += band_sizes[g];
ctx->psy_bands[channel*PSY_MAX_BANDS+w+g].energy = band->energy;
}
}
//modify thresholds - spread, threshold in quiet - 5.4.3 "Spreaded Energy Calculation"
//modify thresholds and energies - spread, threshold in quiet, pre-echo control
for (w = 0; w < wi->num_windows*16; w += 16) {
AacPsyBand *band = &pch->band[w];
AacPsyBand *bands = &pch->band[w];
//5.4.2.3 "Spreading" & 5.4.3 "Spreaded Energy Calculation"
for (g = 1; g < num_bands; g++)
band[g].thr = FFMAX(band[g].thr, band[g-1].thr * coeffs->spread_hi [g]);
bands[g].thr = FFMAX(bands[g].thr, bands[g-1].thr * coeffs[g].spread_hi[0]);
for (g = num_bands - 2; g >= 0; g--)
band[g].thr = FFMAX(band[g].thr, band[g+1].thr * coeffs->spread_low[g]);
bands[g].thr = FFMAX(bands[g].thr, bands[g+1].thr * coeffs[g].spread_low[0]);
//5.4.2.4 "Threshold in quiet"
for (g = 0; g < num_bands; g++) {
band[g].thr_quiet = band[g].thr = FFMAX(band[g].thr, coeffs->ath[g]);
AacPsyBand *band = &bands[g];
band->thr_quiet = band->thr = FFMAX(band->thr, coeffs[g].ath);
//5.4.2.5 "Pre-echo control"
if (!(wi->window_type[0] == LONG_STOP_SEQUENCE || (wi->window_type[1] == LONG_START_SEQUENCE && !w)))
band[g].thr = FFMAX(PSY_3GPP_RPEMIN*band[g].thr, FFMIN(band[g].thr,
PSY_3GPP_RPELEV*pch->prev_band[w+g].thr_quiet));
ctx->psy_bands[channel*PSY_MAX_BANDS+w+g].threshold = band[g].thr;
band->thr = FFMAX(PSY_3GPP_RPEMIN*band->thr, FFMIN(band->thr,
PSY_3GPP_RPELEV*pch->prev_band[w+g].thr_quiet));
}
}
for (w = 0; w < wi->num_windows*16; w += 16) {
for (g = 0; g < num_bands; g++) {
AacPsyBand *band = &pch->band[w+g];
FFPsyBand *psy_band = &ctx->psy_bands[channel*PSY_MAX_BANDS+w+g];
psy_band->threshold = band->thr;
psy_band->energy = band->energy;
}
}
memcpy(pch->prev_band, pch->band, sizeof(pch->band));
}
@ -553,22 +566,9 @@ static FFPsyWindowInfo psy_lame_window(FFPsyContext *ctx,
if (pch->prev_attack == 3 || att_sum) {
uselongblock = 0;
if (attacks[1] && attacks[0])
attacks[1] = 0;
if (attacks[2] && attacks[1])
attacks[2] = 0;
if (attacks[3] && attacks[2])
attacks[3] = 0;
if (attacks[4] && attacks[3])
attacks[4] = 0;
if (attacks[5] && attacks[4])
attacks[5] = 0;
if (attacks[6] && attacks[5])
attacks[6] = 0;
if (attacks[7] && attacks[6])
attacks[7] = 0;
if (attacks[8] && attacks[7])
attacks[8] = 0;
for (i = 1; i < AAC_NUM_BLOCKS_SHORT + 1; i++)
if (attacks[i] && attacks[i-1])
attacks[i] = 0;
}
} else {
/* We have no lookahead info, so just use same type as the previous sequence. */

View File

@ -222,6 +222,12 @@ static void bswap_buf(uint32_t *dst, const uint32_t *src, int w){
}
}
static void bswap16_buf(uint16_t *dst, const uint16_t *src, int len)
{
while (len--)
*dst++ = av_bswap16(*src++);
}
static int sse4_c(void *v, uint8_t * pix1, uint8_t * pix2, int line_size, int h)
{
int s, i;
@ -4324,6 +4330,7 @@ av_cold void dsputil_init(DSPContext* c, AVCodecContext *avctx)
c->add_hfyu_left_prediction = add_hfyu_left_prediction_c;
c->add_hfyu_left_prediction_bgr32 = add_hfyu_left_prediction_bgr32_c;
c->bswap_buf= bswap_buf;
c->bswap16_buf = bswap16_buf;
#if CONFIG_PNG_DECODER
c->add_png_paeth_prediction= ff_add_png_paeth_prediction;
#endif
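Note: the new bswap16_buf() entry mirrors the existing bswap_buf() pointer. A hypothetical caller (not in the patch) would route 16-bit byte swapping through the context so that per-arch versions registered later in dsputil_init() are picked up automatically:

#include <stdint.h>
#include "libavcodec/dsputil.h"

/* Hypothetical helper, for illustration only. */
static void swap_samples(DSPContext *dsp, uint16_t *dst,
                         const uint16_t *src, int len)
{
    dsp->bswap16_buf(dst, src, len);
}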

View File

@ -369,6 +369,7 @@ typedef struct DSPContext {
/* this might write to dst[w] */
void (*add_png_paeth_prediction)(uint8_t *dst, uint8_t *src, uint8_t *top, int w, int bpp);
void (*bswap_buf)(uint32_t *dst, const uint32_t *src, int w);
void (*bswap16_buf)(uint16_t *dst, const uint16_t *src, int len);
void (*h263_v_loop_filter)(uint8_t *src, int stride, int qscale);
void (*h263_h_loop_filter)(uint8_t *src, int stride, int qscale);

View File

@ -1078,13 +1078,13 @@ unsigned avcodec_version( void )
const char *avcodec_configuration(void)
{
return FFMPEG_CONFIGURATION;
return LIBAV_CONFIGURATION;
}
const char *avcodec_license(void)
{
#define LICENSE_PREFIX "libavcodec license: "
return LICENSE_PREFIX FFMPEG_LICENSE + sizeof(LICENSE_PREFIX) - 1;
return LICENSE_PREFIX LIBAV_LICENSE + sizeof(LICENSE_PREFIX) - 1;
}
void avcodec_init(void)
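Note: the return statement above relies on string-literal concatenation plus pointer arithmetic — adding sizeof(LICENSE_PREFIX) - 1 steps past the prefix so only the license name is returned. A standalone illustration with a stand-in license string (the real value comes from LIBAV_LICENSE in config.h, which is not shown in this diff):

#include <stdio.h>

#define LICENSE_PREFIX "libavcodec license: "

int main(void)
{
    /* "libavcodec license: LGPL..." + 20 -> points at "LGPL..." */
    const char *name = LICENSE_PREFIX "LGPL version 2.1 or later"
                       + sizeof(LICENSE_PREFIX) - 1;
    printf("%s\n", name);
    return 0;
}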

View File

@ -26,24 +26,45 @@
#include "libavutil/common.h"
#if ARCH_X86_32
#define MULL(ra, rb, shift) \
({ int rt, dummy; __asm__ (\
"imull %3 \n\t"\
"shrdl %4, %%edx, %%eax \n\t"\
: "=a"(rt), "=d"(dummy)\
: "a" ((int)(ra)), "rm" ((int)(rb)), "i"(shift));\
rt; })
#define MULH(ra, rb) \
({ int rt, dummy;\
__asm__ ("imull %3\n\t" : "=d"(rt), "=a"(dummy): "a" ((int)(ra)), "rm" ((int)(rb)));\
rt; })
#define MULL MULL
static av_always_inline av_const int MULL(int a, int b, unsigned shift)
{
int rt, dummy;
__asm__ (
"imull %3 \n\t"
"shrdl %4, %%edx, %%eax \n\t"
:"=a"(rt), "=d"(dummy)
:"a"(a), "rm"(b), "ci"((uint8_t)shift)
);
return rt;
}
#define MUL64(ra, rb) \
({ int64_t rt;\
__asm__ ("imull %2\n\t" : "=A"(rt) : "a" ((int)(ra)), "g" ((int)(rb)));\
rt; })
#endif
#define MULH MULH
static av_always_inline av_const int MULH(int a, int b)
{
int rt, dummy;
__asm__ (
"imull %3"
:"=d"(rt), "=a"(dummy)
:"a"(a), "rm"(b)
);
return rt;
}
#define MUL64 MUL64
static av_always_inline av_const int64_t MUL64(int a, int b)
{
int64_t rt;
__asm__ (
"imull %2"
:"=A"(rt)
:"a"(a), "rm"(b)
);
return rt;
}
#endif /* ARCH_X86_32 */
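Note: for reference only (not part of the patch), the portable C semantics that these x86 inline-asm routines implement — imull leaves the 64-bit product in edx:eax, MULH returns its high half, and shrdl shifts the pair right so MULL returns the product scaled down by `shift`:

#include <stdint.h>

static inline int64_t mul64_c(int a, int b)            { return (int64_t)a * b;     }
static inline int     mulh_c(int a, int b)             { return mul64_c(a, b) >> 32; }
static inline int     mull_c(int a, int b, unsigned s) { return mul64_c(a, b) >> s;  }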
#if HAVE_CMOV
/* median of 3 */

View File

@ -25,11 +25,11 @@ unsigned avdevice_version(void)
const char * avdevice_configuration(void)
{
return FFMPEG_CONFIGURATION;
return LIBAV_CONFIGURATION;
}
const char * avdevice_license(void)
{
#define LICENSE_PREFIX "libavdevice license: "
return LICENSE_PREFIX FFMPEG_LICENSE + sizeof(LICENSE_PREFIX) - 1;
return LICENSE_PREFIX LIBAV_LICENSE + sizeof(LICENSE_PREFIX) - 1;
}

View File

@ -34,13 +34,13 @@ unsigned avfilter_version(void) {
const char *avfilter_configuration(void)
{
return FFMPEG_CONFIGURATION;
return LIBAV_CONFIGURATION;
}
const char *avfilter_license(void)
{
#define LICENSE_PREFIX "libavfilter license: "
return LICENSE_PREFIX FFMPEG_LICENSE + sizeof(LICENSE_PREFIX) - 1;
return LICENSE_PREFIX LIBAV_LICENSE + sizeof(LICENSE_PREFIX) - 1;
}
AVFilterBufferRef *avfilter_ref_buffer(AVFilterBufferRef *ref, int pmask)

View File

@ -106,7 +106,7 @@ static int fourxm_read_header(AVFormatContext *s,
fourxm->fps = 1.0;
/* skip the first 3 32-bit numbers */
avio_seek(pb, 12, SEEK_CUR);
avio_skip(pb, 12);
/* check for LIST-HEAD */
GET_LIST_HEADER();
@ -322,12 +322,12 @@ static int fourxm_read_packet(AVFormatContext *s,
fourxm->tracks[track_number].audio_pts += audio_frame_count;
} else {
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
}
break;
default:
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
break;
}
}
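Note: the avio_seek(pb, n, SEEK_CUR) -> avio_skip(pb, n) substitutions repeated through the demuxer hunks below are a readability change; both forms advance the read position by n bytes. A minimal sketch, assuming only the avio_skip() declaration this patch adds to avio.h:

#include "libavformat/avio.h"

/* Skip a fixed-size reserved field; identical in effect to
 * avio_seek(pb, 12, SEEK_CUR). */
static int64_t skip_reserved(AVIOContext *pb)
{
    return avio_skip(pb, 12);
}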

View File

@ -131,9 +131,9 @@ OBJS-$(CONFIG_MMF_MUXER) += mmf.o riff.o
OBJS-$(CONFIG_MOV_DEMUXER) += mov.o riff.o isom.o
OBJS-$(CONFIG_MOV_MUXER) += movenc.o riff.o isom.o avc.o \
movenchint.o rtpenc_chain.o
OBJS-$(CONFIG_MP2_MUXER) += mp3enc.o
OBJS-$(CONFIG_MP2_MUXER) += mp3enc.o rawenc.o
OBJS-$(CONFIG_MP3_DEMUXER) += mp3dec.o
OBJS-$(CONFIG_MP3_MUXER) += mp3enc.o
OBJS-$(CONFIG_MP3_MUXER) += mp3enc.o rawenc.o
OBJS-$(CONFIG_MPC_DEMUXER) += mpc.o apetag.o
OBJS-$(CONFIG_MPC8_DEMUXER) += mpc8.o
OBJS-$(CONFIG_MPEG1SYSTEM_MUXER) += mpegenc.o

View File

@ -149,7 +149,7 @@ static int a64_write_packet(struct AVFormatContext *s, AVPacket *pkt)
break;
}
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}
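Note: put_flush_packet() -> avio_flush() is another one-for-one rename (the old name survives as a deprecated wrapper in aviobuf.c further down). A sketch of a muxer write_packet after the rename, mirroring the pattern in these hunks; the function name is illustrative:

#include "libavformat/avformat.h"

/* Illustrative only: write the packet payload and flush the byte stream. */
static int write_packet_example(AVFormatContext *s, AVPacket *pkt)
{
    avio_write(s->pb, pkt->data, pkt->size);
    avio_flush(s->pb);          /* was put_flush_packet(s->pb) */
    return 0;
}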

View File

@ -132,7 +132,7 @@ static int adts_write_packet(AVFormatContext *s, AVPacket *pkt)
}
}
avio_write(pb, pkt->data, pkt->size);
put_flush_packet(pb);
avio_flush(pb);
return 0;
}

View File

@ -62,9 +62,9 @@ static int aea_read_header(AVFormatContext *s,
return AVERROR(ENOMEM);
/* Parse the amount of channels and skip to pos 2048(0x800) */
avio_seek(s->pb, 264, SEEK_CUR);
avio_skip(s->pb, 264);
st->codec->channels = avio_r8(s->pb);
avio_seek(s->pb, 1783, SEEK_CUR);
avio_skip(s->pb, 1783);
st->codec->codec_type = AVMEDIA_TYPE_AUDIO;

View File

@ -70,7 +70,7 @@ static void get_meta(AVFormatContext *s, const char *key, int size)
int res;
if (!str) {
avio_seek(s->pb, size, SEEK_CUR);
avio_skip(s->pb, size);
return;
}
@ -152,7 +152,7 @@ static unsigned int get_aiff_header(AVIOContext *pb, AVCodecContext *codec,
/* Chunk is over */
if (size)
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
return num_frames;
}
@ -242,7 +242,7 @@ static int aiff_read_header(AVFormatContext *s,
av_log(s, AV_LOG_ERROR, "file is not seekable\n");
return -1;
}
avio_seek(pb, size - 8, SEEK_CUR);
avio_skip(pb, size - 8);
break;
case MKTAG('w', 'a', 'v', 'e'):
if ((uint64_t)size > (1<<30))
@ -256,7 +256,7 @@ static int aiff_read_header(AVFormatContext *s,
default: /* Jump */
if (size & 1) /* Always even aligned */
size++;
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
}
}

View File

@ -98,7 +98,7 @@ static int aiff_write_header(AVFormatContext *s)
av_set_pts_info(s->streams[0], 64, 1, s->streams[0]->codec->sample_rate);
/* Data is starting here */
put_flush_packet(pb);
avio_flush(pb);
return 0;
}
@ -140,7 +140,7 @@ static int aiff_write_trailer(AVFormatContext *s)
/* return to the end */
avio_seek(pb, end_size, SEEK_SET);
put_flush_packet(pb);
avio_flush(pb);
}
return 0;

View File

@ -50,14 +50,14 @@ static int amr_write_header(AVFormatContext *s)
{
return -1;
}
put_flush_packet(pb);
avio_flush(pb);
return 0;
}
static int amr_write_packet(AVFormatContext *s, AVPacket *pkt)
{
avio_write(s->pb, pkt->data, pkt->size);
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}
#endif /* CONFIG_AMR_MUXER */

View File

@ -83,7 +83,7 @@ static int read_header(AVFormatContext *s,
AVStream *st;
int i, ret;
avio_seek(pb, 4, SEEK_CUR); /* magic number */
avio_skip(pb, 4); /* magic number */
if (avio_rl16(pb) != MAX_PAGES) {
av_log_ask_for_sample(s, "max_pages != " AV_STRINGIFY(MAX_PAGES) "\n");
return AVERROR_INVALIDDATA;
@ -91,7 +91,7 @@ static int read_header(AVFormatContext *s,
anm->nb_pages = avio_rl16(pb);
anm->nb_records = avio_rl32(pb);
avio_seek(pb, 2, SEEK_CUR); /* max records per page */
avio_skip(pb, 2); /* max records per page */
anm->page_table_offset = avio_rl16(pb);
if (avio_rl32(pb) != ANIM_TAG)
return AVERROR_INVALIDDATA;
@ -107,13 +107,13 @@ static int read_header(AVFormatContext *s,
st->codec->height = avio_rl16(pb);
if (avio_r8(pb) != 0)
goto invalid;
avio_seek(pb, 1, SEEK_CUR); /* frame rate multiplier info */
avio_skip(pb, 1); /* frame rate multiplier info */
/* ignore last delta record (used for looping) */
if (avio_r8(pb)) /* has_last_delta */
anm->nb_records = FFMAX(anm->nb_records - 1, 0);
avio_seek(pb, 1, SEEK_CUR); /* last_delta_valid */
avio_skip(pb, 1); /* last_delta_valid */
if (avio_r8(pb) != 0)
goto invalid;
@ -121,15 +121,15 @@ static int read_header(AVFormatContext *s,
if (avio_r8(pb) != 1)
goto invalid;
avio_seek(pb, 1, SEEK_CUR); /* other recs per frame */
avio_skip(pb, 1); /* other recs per frame */
if (avio_r8(pb) != 1)
goto invalid;
avio_seek(pb, 32, SEEK_CUR); /* record_types */
avio_skip(pb, 32); /* record_types */
st->nb_frames = avio_rl32(pb);
av_set_pts_info(st, 64, 1, avio_rl16(pb));
avio_seek(pb, 58, SEEK_CUR);
avio_skip(pb, 58);
/* color cycling and palette data */
st->codec->extradata_size = 16*8 + 4*256;
@ -193,7 +193,7 @@ repeat:
/* parse page header */
if (anm->record < 0) {
avio_seek(pb, anm->page_table_offset + MAX_PAGES*6 + (anm->page<<16), SEEK_SET);
avio_seek(pb, 8 + 2*p->nb_records, SEEK_CUR);
avio_skip(pb, 8 + 2*p->nb_records);
anm->record = 0;
}

View File

@ -187,7 +187,7 @@ static int ape_read_header(AVFormatContext * s, AVFormatParameters * ap)
/* Skip any unknown bytes at the end of the descriptor.
This is for future compatibility */
if (ape->descriptorlength > 52)
avio_seek(pb, ape->descriptorlength - 52, SEEK_CUR);
avio_skip(pb, ape->descriptorlength - 52);
/* Read header data */
ape->compressiontype = avio_rl16(pb);
@ -212,7 +212,7 @@ static int ape_read_header(AVFormatContext * s, AVFormatParameters * ap)
ape->finalframeblocks = avio_rl32(pb);
if (ape->formatflags & MAC_FORMAT_FLAG_HAS_PEAK_LEVEL) {
avio_seek(pb, 4, SEEK_CUR); /* Skip the peak level */
avio_skip(pb, 4); /* Skip the peak level */
ape->headerlength += 4;
}
@ -239,7 +239,7 @@ static int ape_read_header(AVFormatContext * s, AVFormatParameters * ap)
/* Skip any stored wav header */
if (!(ape->formatflags & MAC_FORMAT_FLAG_CREATE_WAV_HEADER))
avio_seek(pb, ape->wavheaderlength, SEEK_CUR);
avio_skip(pb, ape->wavheaderlength);
}
if(!ape->totalframes){

View File

@ -86,57 +86,6 @@ static int read_chomp_line(AVIOContext *s, char *buf, int maxlen)
return len;
}
static void make_absolute_url(char *buf, int size, const char *base,
const char *rel)
{
char *sep;
/* Absolute path, relative to the current server */
if (base && strstr(base, "://") && rel[0] == '/') {
if (base != buf)
av_strlcpy(buf, base, size);
sep = strstr(buf, "://");
if (sep) {
sep += 3;
sep = strchr(sep, '/');
if (sep)
*sep = '\0';
}
av_strlcat(buf, rel, size);
return;
}
/* If rel actually is an absolute url, just copy it */
if (!base || strstr(rel, "://") || rel[0] == '/') {
av_strlcpy(buf, rel, size);
return;
}
if (base != buf)
av_strlcpy(buf, base, size);
/* Remove the file name from the base url */
sep = strrchr(buf, '/');
if (sep)
sep[1] = '\0';
else
buf[0] = '\0';
while (av_strstart(rel, "../", NULL) && sep) {
/* Remove the path delimiter at the end */
sep[0] = '\0';
sep = strrchr(buf, '/');
/* If the next directory name to pop off is "..", break here */
if (!strcmp(sep ? &sep[1] : buf, "..")) {
/* Readd the slash we just removed */
av_strlcat(buf, "/", size);
break;
}
/* Cut off the directory name */
if (sep)
sep[1] = '\0';
else
buf[0] = '\0';
rel += 3;
}
av_strlcat(buf, rel, size);
}
static void free_segment_list(struct variant *var)
{
int i;
@ -183,7 +132,7 @@ static struct variant *new_variant(AppleHTTPContext *c, int bandwidth,
return NULL;
reset_packet(&var->pkt);
var->bandwidth = bandwidth;
make_absolute_url(var->url, sizeof(var->url), base, url);
ff_make_absolute_url(var->url, sizeof(var->url), base, url);
dynarray_add(&c->variants, &c->n_variants, var);
return var;
}
@ -274,7 +223,7 @@ static int parse_playlist(AppleHTTPContext *c, const char *url,
goto fail;
}
seg->duration = duration;
make_absolute_url(seg->url, sizeof(seg->url), url, line);
ff_make_absolute_url(seg->url, sizeof(seg->url), url, line);
dynarray_add(&var->segments, &var->n_segments, seg);
is_segment = 0;
}
@ -519,7 +468,7 @@ reload:
c->max_start_seq - c->cur_seq_no);
c->cur_seq_no = c->max_start_seq;
}
/* If more segments exit, open the next one */
/* If more segments exist, open the next one */
if (c->cur_seq_no < c->min_end_seq)
goto start;
/* We've reached the end of the playlists - return eof if this is a

View File

@ -75,57 +75,6 @@ static int read_chomp_line(AVIOContext *s, char *buf, int maxlen)
return len;
}
static void make_absolute_url(char *buf, int size, const char *base,
const char *rel)
{
char *sep;
/* Absolute path, relative to the current server */
if (base && strstr(base, "://") && rel[0] == '/') {
if (base != buf)
av_strlcpy(buf, base, size);
sep = strstr(buf, "://");
if (sep) {
sep += 3;
sep = strchr(sep, '/');
if (sep)
*sep = '\0';
}
av_strlcat(buf, rel, size);
return;
}
/* If rel actually is an absolute url, just copy it */
if (!base || strstr(rel, "://") || rel[0] == '/') {
av_strlcpy(buf, rel, size);
return;
}
if (base != buf)
av_strlcpy(buf, base, size);
/* Remove the file name from the base url */
sep = strrchr(buf, '/');
if (sep)
sep[1] = '\0';
else
buf[0] = '\0';
while (av_strstart(rel, "../", NULL) && sep) {
/* Remove the path delimiter at the end */
sep[0] = '\0';
sep = strrchr(buf, '/');
/* If the next directory name to pop off is "..", break here */
if (!strcmp(sep ? &sep[1] : buf, "..")) {
/* Readd the slash we just removed */
av_strlcat(buf, "/", size);
break;
}
/* Cut off the directory name */
if (sep)
sep[1] = '\0';
else
buf[0] = '\0';
rel += 3;
}
av_strlcat(buf, rel, size);
}
static void free_segment_list(AppleHTTPContext *s)
{
int i;
@ -201,7 +150,7 @@ static int parse_playlist(URLContext *h, const char *url)
goto fail;
}
seg->duration = duration;
make_absolute_url(seg->url, sizeof(seg->url), url, line);
ff_make_absolute_url(seg->url, sizeof(seg->url), url, line);
dynarray_add(&s->segments, &s->n_segments, seg);
is_segment = 0;
} else if (is_variant) {
@ -211,7 +160,7 @@ static int parse_playlist(URLContext *h, const char *url)
goto fail;
}
var->bandwidth = bandwidth;
make_absolute_url(var->url, sizeof(var->url), url, line);
ff_make_absolute_url(var->url, sizeof(var->url), url, line);
dynarray_add(&s->variants, &s->n_variants, var);
is_variant = 0;
}
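Note: both Apple HTTP files drop their private make_absolute_url() copy in favour of a shared ff_make_absolute_url(); judging from the call sites it keeps the same (buf, size, base, rel) signature, though its declaring header is not visible in this diff. A usage sketch under that assumption:

/* Assumed prototype, matching the removed static helper: */
void ff_make_absolute_url(char *buf, int size, const char *base, const char *rel);

static void resolve_example(void)
{
    char out[1024];
    /* "../seg/0001.ts" against the playlist URL resolves to
     * "http://host/seg/0001.ts" with the original helper's logic. */
    ff_make_absolute_url(out, sizeof(out),
                         "http://host/path/playlist.m3u8", "../seg/0001.ts");
}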

View File

@ -349,7 +349,7 @@ static int asf_read_stream_properties(AVFormatContext *s, int64_t size)
avio_rl16(pb); /* panes */
st->codec->bits_per_coded_sample = avio_rl16(pb); /* depth */
tag1 = avio_rl32(pb);
avio_seek(pb, 20, SEEK_CUR);
avio_skip(pb, 20);
// av_log(s, AV_LOG_DEBUG, "size:%d tsize:%d sizeX:%d\n", size, total_size, sizeX);
if (sizeX > 40) {
st->codec->extradata_size = sizeX - 40;
@ -387,7 +387,7 @@ static int asf_read_stream_properties(AVFormatContext *s, int64_t size)
st->need_parsing = AVSTREAM_PARSE_FULL_ONCE;
}
pos2 = avio_tell(pb);
avio_seek(pb, size - (pos2 - pos1 + 24), SEEK_CUR);
avio_skip(pb, size - (pos2 - pos1 + 24));
return 0;
}
@ -427,14 +427,14 @@ static int asf_read_ext_stream_properties(AVFormatContext *s, int64_t size)
for (i=0; i<stream_ct; i++){
avio_rl16(pb);
ext_len = avio_rl16(pb);
avio_seek(pb, ext_len, SEEK_CUR);
avio_skip(pb, ext_len);
}
for (i=0; i<payload_ext_ct; i++){
ff_get_guid(pb, &g);
ext_d=avio_rl16(pb);
ext_len=avio_rl32(pb);
avio_seek(pb, ext_len, SEEK_CUR);
avio_skip(pb, ext_len);
}
return 0;
@ -454,7 +454,7 @@ static int asf_read_content_desc(AVFormatContext *s, int64_t size)
get_tag(s, "author" , 0, len2);
get_tag(s, "copyright", 0, len3);
get_tag(s, "comment" , 0, len4);
avio_seek(pb, len5, SEEK_CUR);
avio_skip(pb, len5);
return 0;
}
@ -474,7 +474,7 @@ static int asf_read_ext_content_desc(AVFormatContext *s, int64_t size)
if (name_len%2) // must be even, broken lavf versions wrote len-1
name_len += 1;
if ((ret = avio_get_str16le(pb, name_len, name, sizeof(name))) < name_len)
avio_seek(pb, name_len - ret, SEEK_CUR);
avio_skip(pb, name_len - ret);
value_type = avio_rl16(pb);
value_len = avio_rl16(pb);
if (!value_type && value_len%2)
@ -504,7 +504,7 @@ static int asf_read_language_list(AVFormatContext *s, int64_t size)
char lang[6];
unsigned int lang_len = avio_r8(pb);
if ((ret = avio_get_str16le(pb, lang_len, lang, sizeof(lang))) < lang_len)
avio_seek(pb, lang_len - ret, SEEK_CUR);
avio_skip(pb, lang_len - ret);
if (j < 128)
av_strlcpy(asf->stream_languages[j], lang, sizeof(*asf->stream_languages));
}
@ -530,10 +530,10 @@ static int asf_read_metadata(AVFormatContext *s, int64_t size)
value_len= avio_rl32(pb);
if ((ret = avio_get_str16le(pb, name_len, name, sizeof(name))) < name_len)
avio_seek(pb, name_len - ret, SEEK_CUR);
avio_skip(pb, name_len - ret);
//av_log(s, AV_LOG_ERROR, "%d %d %d %d %d <%s>\n", i, stream_num, name_len, value_type, value_len, name);
value_num= avio_rl16(pb);//we should use get_value() here but it does not work 2 is le16 here but le32 elsewhere
avio_seek(pb, value_len - 2, SEEK_CUR);
avio_skip(pb, value_len - 2);
if(stream_num<128){
if (!strcmp(name, "AspectRatioX")) asf->dar[stream_num].num= value_num;
@ -570,7 +570,7 @@ static int asf_read_marker(AVFormatContext *s, int64_t size)
avio_rl32(pb); // flags
name_len = avio_rl32(pb); // name length
if ((ret = avio_get_str16le(pb, name_len * 2, name, sizeof(name))) < name_len)
avio_seek(pb, name_len - ret, SEEK_CUR);
avio_skip(pb, name_len - ret);
ff_new_chapter(s, i, (AVRational){1, 10000000}, pres_time, AV_NOPTS_VALUE, name );
}
@ -825,16 +825,16 @@ static int asf_read_frame_header(AVFormatContext *s, AVIOContext *pb){
// for(i=0; i<asf->packet_replic_size-8; i++)
// av_log(s, AV_LOG_DEBUG, "%02X ",avio_r8(pb));
// av_log(s, AV_LOG_DEBUG, "\n");
avio_seek(pb, 10, SEEK_CUR);
avio_skip(pb, 10);
ts0= avio_rl64(pb);
ts1= avio_rl64(pb);
avio_seek(pb, 12, SEEK_CUR);
avio_skip(pb, 12);
avio_rl32(pb);
avio_seek(pb, asf->packet_replic_size - 8 - 38 - 4, SEEK_CUR);
avio_skip(pb, asf->packet_replic_size - 8 - 38 - 4);
if(ts0!= -1) asf->packet_frag_timestamp= ts0/10000;
else asf->packet_frag_timestamp= AV_NOPTS_VALUE;
}else
avio_seek(pb, asf->packet_replic_size - 8, SEEK_CUR);
avio_skip(pb, asf->packet_replic_size - 8);
rsize += asf->packet_replic_size; // FIXME - check validity
} else if (asf->packet_replic_size==1){
// multipacket - frag_offset is beginning timestamp
@ -894,7 +894,7 @@ static int ff_asf_parse_packet(AVFormatContext *s, AVIOContext *pb, AVPacket *pk
//printf("PacketLeftSize:%d Pad:%d Pos:%"PRId64"\n", asf->packet_size_left, asf->packet_padsize, avio_tell(pb));
assert(ret>=0);
/* fail safe */
avio_seek(pb, ret, SEEK_CUR);
avio_skip(pb, ret);
asf->packet_pos= avio_tell(pb);
if (asf->data_object_size != (uint64_t)-1 &&
@ -913,7 +913,7 @@ static int ff_asf_parse_packet(AVFormatContext *s, AVIOContext *pb, AVPacket *pk
) {
asf->packet_time_start = 0;
/* unhandled packet (should not happen) */
avio_seek(pb, asf->packet_frag_size, SEEK_CUR);
avio_skip(pb, asf->packet_frag_size);
asf->packet_size_left -= asf->packet_frag_size;
if(asf->stream_index < 0)
av_log(s, AV_LOG_ERROR, "ff asf skip %d (unknown stream)\n", asf->packet_frag_size);
@ -933,7 +933,7 @@ static int ff_asf_parse_packet(AVFormatContext *s, AVIOContext *pb, AVPacket *pk
if (asf->packet_multi_size < asf->packet_obj_size)
{
asf->packet_time_start = 0;
avio_seek(pb, asf->packet_multi_size, SEEK_CUR);
avio_skip(pb, asf->packet_multi_size);
asf->packet_size_left -= asf->packet_multi_size;
continue;
}
@ -1198,7 +1198,7 @@ static void asf_build_simple_index(AVFormatContext *s, int stream_index)
avio_seek(s->pb, current_pos, SEEK_SET);
return;
}
avio_seek(s->pb, gsize-24, SEEK_CUR);
avio_skip(s->pb, gsize-24);
ff_get_guid(s->pb, &g);
}

View File

@ -574,7 +574,7 @@ static int asf_write_header(AVFormatContext *s)
return -1;
}
put_flush_packet(s->pb);
avio_flush(s->pb);
asf->packet_nb_payloads = 0;
asf->packet_timestamp_start = -1;
@ -672,7 +672,7 @@ static void flush_packet(AVFormatContext *s)
avio_write(s->pb, asf->packet_buf, s->packet_size - packet_hdr_size);
put_flush_packet(s->pb);
avio_flush(s->pb);
asf->nb_packets++;
asf->packet_nb_payloads = 0;
asf->packet_timestamp_start = -1;
@ -864,7 +864,7 @@ static int asf_write_trailer(AVFormatContext *s)
if ((!asf->is_streamed) && (asf->nb_index_count != 0)) {
asf_write_index(s, asf->index_ptr, asf->maximum_packet, asf->nb_index_count);
}
put_flush_packet(s->pb);
avio_flush(s->pb);
if (asf->is_streamed || url_is_streamed(s->pb)) {
put_chunk(s, 0x4524, 0, 0); /* end of stream */
@ -875,7 +875,7 @@ static int asf_write_trailer(AVFormatContext *s)
asf_write_header1(s, file_size, data_size - asf->data_offset);
}
put_flush_packet(s->pb);
avio_flush(s->pb);
av_free(asf->index_ptr);
return 0;
}

View File

@ -50,7 +50,7 @@ static int write_header(AVFormatContext *s)
last=p;
}
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}
@ -59,7 +59,7 @@ static int write_packet(AVFormatContext *s, AVPacket *pkt)
{
avio_write(s->pb, pkt->data, pkt->size);
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}
@ -72,7 +72,7 @@ static int write_trailer(AVFormatContext *s)
avio_write(s->pb, avctx->extradata + ass->extra_index,
avctx->extradata_size - ass->extra_index);
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}

View File

@ -74,7 +74,7 @@ static int au_write_header(AVFormatContext *s)
return -1;
}
put_flush_packet(pb);
avio_flush(pb);
return 0;
}
@ -99,7 +99,7 @@ static int au_write_trailer(AVFormatContext *s)
avio_wb32(pb, (uint32_t)(file_size - 24));
avio_seek(pb, file_size, SEEK_SET);
put_flush_packet(pb);
avio_flush(pb);
}
return 0;
@ -147,7 +147,7 @@ static int au_read_header(AVFormatContext *s,
if (size >= 24) {
/* skip unused data */
avio_seek(pb, size - 24, SEEK_CUR);
avio_skip(pb, size - 24);
}
/* now we are ready: build format streams */

View File

@ -321,12 +321,12 @@ static void avi_read_nikon(AVFormatContext *s, uint64_t end)
}
if (name)
av_metadata_set2(&s->metadata, name, buffer, 0);
avio_seek(s->pb, size, SEEK_CUR);
avio_skip(s->pb, size);
}
break;
}
default:
avio_seek(s->pb, size, SEEK_CUR);
avio_skip(s->pb, size);
break;
}
}
@ -392,13 +392,13 @@ static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
unsigned char date[64] = {0};
size += (size & 1);
size -= avio_read(pb, date, FFMIN(size, sizeof(date)-1));
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
avi_metadata_creation_time(&s->metadata, date);
break;
}
case MKTAG('d', 'm', 'l', 'h'):
avi->is_odml = 1;
avio_seek(pb, size + (size & 1), SEEK_CUR);
avio_skip(pb, size + (size & 1));
break;
case MKTAG('a', 'm', 'v', 'h'):
amv_file_format=1;
@ -410,13 +410,13 @@ static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
avio_rl32(pb);
avi->non_interleaved |= avio_rl32(pb) & AVIF_MUSTUSEINDEX;
avio_seek(pb, 2 * 4, SEEK_CUR);
avio_skip(pb, 2 * 4);
avio_rl32(pb);
avio_rl32(pb);
avih_width=avio_rl32(pb);
avih_height=avio_rl32(pb);
avio_seek(pb, size - 10 * 4, SEEK_CUR);
avio_skip(pb, size - 10 * 4);
break;
case MKTAG('s', 't', 'r', 'h'):
/* stream header */
@ -425,7 +425,7 @@ static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
handler = avio_rl32(pb); /* codec tag */
if(tag1 == MKTAG('p', 'a', 'd', 's')){
avio_seek(pb, size - 8, SEEK_CUR);
avio_skip(pb, size - 8);
break;
}else{
stream_index++;
@ -469,10 +469,10 @@ static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
goto fail;
}
s->streams[0]->priv_data = ast;
avio_seek(pb, 3 * 4, SEEK_CUR);
avio_skip(pb, 3 * 4);
ast->scale = avio_rl32(pb);
ast->rate = avio_rl32(pb);
avio_seek(pb, 4, SEEK_CUR); /* start time */
avio_skip(pb, 4); /* start time */
dv_dur = avio_rl32(pb);
if (ast->scale > 0 && ast->rate > 0 && dv_dur > 0) {
@ -485,7 +485,7 @@ static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
*/
stream_index = s->nb_streams - 1;
avio_seek(pb, size - 9*4, SEEK_CUR);
avio_skip(pb, size - 9*4);
break;
}
@ -542,12 +542,12 @@ static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
if(ast->sample_size == 0)
st->duration = st->nb_frames;
ast->frame_offset= ast->cum_len;
avio_seek(pb, size - 12 * 4, SEEK_CUR);
avio_skip(pb, size - 12 * 4);
break;
case MKTAG('s', 't', 'r', 'f'):
/* stream header */
if (stream_index >= (unsigned)s->nb_streams || avi->dv_demux) {
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
} else {
uint64_t cur_pos = avio_tell(pb);
if (cur_pos < list_end)
@ -560,7 +560,7 @@ static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
st->codec->height=avih_height;
st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
st->codec->codec_id = CODEC_ID_AMV;
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
break;
}
tag1 = ff_get_bmp_header(pb, st);
@ -620,7 +620,7 @@ static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
}
st->codec->height= FFABS(st->codec->height);
// avio_seek(pb, size - 5 * 4, SEEK_CUR);
// avio_skip(pb, size - 5 * 4);
break;
case AVMEDIA_TYPE_AUDIO:
ff_get_wav_header(pb, st->codec, size);
@ -630,7 +630,7 @@ static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
ast->sample_size= st->codec->block_align;
}
if (size&1) /* 2-aligned (fix for Stargate SG-1 - 3x18 - Shades of Grey.avi) */
avio_seek(pb, 1, SEEK_CUR);
avio_skip(pb, 1);
/* Force parsing as several audio frames can be in
* one packet and timestamps refer to packet start. */
st->need_parsing = AVSTREAM_PARSE_TIMESTAMPS;
@ -658,7 +658,7 @@ static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
st->codec->codec_type = AVMEDIA_TYPE_DATA;
st->codec->codec_id= CODEC_ID_NONE;
st->codec->codec_tag= 0;
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
break;
}
}
@ -693,7 +693,7 @@ static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
}
size -= 9*4;
}
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
break;
case MKTAG('s', 't', 'r', 'n'):
if(s->nb_streams){
@ -710,7 +710,7 @@ static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
}
/* skip tag */
size += (size & 1);
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
break;
}
}
@ -762,7 +762,7 @@ static int read_gab2_sub(AVStream *st, AVPacket *pkt) {
goto error;
ret = avio_get_str16le(pb, desc_len, desc, sizeof(desc));
avio_seek(pb, desc_len - ret, SEEK_CUR);
avio_skip(pb, desc_len - ret);
if (*desc)
av_metadata_set2(&st->metadata, "title", desc, 0);
@ -1008,14 +1008,14 @@ resync:
//parse JUNK
||(d[0] == 'J' && d[1] == 'U' && d[2] == 'N' && d[3] == 'K')
||(d[0] == 'i' && d[1] == 'd' && d[2] == 'x' && d[3] == '1')){
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
//av_log(s, AV_LOG_DEBUG, "SKIP\n");
goto resync;
}
//parse stray LIST
if(d[0] == 'L' && d[1] == 'I' && d[2] == 'S' && d[3] == 'T'){
avio_seek(pb, 4, SEEK_CUR);
avio_skip(pb, 4);
goto resync;
}
@ -1026,7 +1026,7 @@ resync:
//detect ##ix chunk and skip
if(d[2] == 'i' && d[3] == 'x' && n < s->nb_streams){
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
goto resync;
}
@ -1060,7 +1060,7 @@ resync:
/*|| (st->discard >= AVDISCARD_NONKEY && !(pkt->flags & AV_PKT_FLAG_KEY))*/ //FIXME needs a little reordering
|| st->discard >= AVDISCARD_ALL){
ast->frame_offset += get_duration(ast, size);
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
goto resync;
}
@ -1225,7 +1225,7 @@ static int avi_load_index(AVFormatContext *s)
default:
skip:
size += (size & 1);
if (avio_seek(pb, size, SEEK_CUR) < 0)
if (avio_skip(pb, size) < 0)
goto the_end; // something is wrong here
break;
}

View File

@ -393,7 +393,7 @@ static int avi_write_header(AVFormatContext *s)
avi->movi_list = ff_start_tag(pb, "LIST");
ffio_wfourcc(pb, "movi");
put_flush_packet(pb);
avio_flush(pb);
return 0;
}
@ -438,15 +438,15 @@ static int avi_write_ix(AVFormatContext *s)
avio_wl32(pb, ((uint32_t)ie->len & ~0x80000000) |
(ie->flags & 0x10 ? 0 : 0x80000000));
}
put_flush_packet(pb);
avio_flush(pb);
pos = avio_tell(pb);
/* Updating one entry in the AVI OpenDML master index */
avio_seek(pb, avist->indexes.indx_start - 8, SEEK_SET);
ffio_wfourcc(pb, "indx"); /* enabling this entry */
avio_seek(pb, 8, SEEK_CUR);
avio_skip(pb, 8);
avio_wl32(pb, avi->riff_id); /* nEntriesInUse */
avio_seek(pb, 16*avi->riff_id, SEEK_CUR);
avio_skip(pb, 16*avi->riff_id);
avio_wl64(pb, ix); /* qwOffset */
avio_wl32(pb, pos - ix); /* dwSize */
avio_wl32(pb, avist->indexes.entry); /* dwDuration */
@ -578,7 +578,7 @@ static int avi_write_packet(AVFormatContext *s, AVPacket *pkt)
if (size & 1)
avio_w8(pb, 0);
put_flush_packet(pb);
avio_flush(pb);
return 0;
}
@ -603,7 +603,7 @@ static int avi_write_trailer(AVFormatContext *s)
file_size = avio_tell(pb);
avio_seek(pb, avi->odml_list - 8, SEEK_SET);
ffio_wfourcc(pb, "LIST"); /* Making this AVI OpenDML one */
avio_seek(pb, 16, SEEK_CUR);
avio_skip(pb, 16);
for (n=nb_frames=0;n<s->nb_streams;n++) {
AVCodecContext *stream = s->streams[n]->codec;
@ -624,7 +624,7 @@ static int avi_write_trailer(AVFormatContext *s)
avi_write_counters(s, avi->riff_id);
}
}
put_flush_packet(pb);
avio_flush(pb);
for (i=0; i<s->nb_streams; i++) {
AVIStream *avist= s->streams[i]->priv_data;

View File

@ -429,6 +429,13 @@ attribute_deprecated int64_t url_ftell(AVIOContext *s);
attribute_deprecated int64_t url_fsize(AVIOContext *s);
#define URL_EOF (-1)
attribute_deprecated int url_fgetc(AVIOContext *s);
attribute_deprecated int url_setbufsize(AVIOContext *s, int buf_size);
#ifdef __GNUC__
attribute_deprecated int url_fprintf(AVIOContext *s, const char *fmt, ...) __attribute__ ((__format__ (__printf__, 2, 3)));
#else
attribute_deprecated int url_fprintf(AVIOContext *s, const char *fmt, ...);
#endif
attribute_deprecated void put_flush_packet(AVIOContext *s);
/**
* @}
*/
@ -488,7 +495,10 @@ int64_t avio_skip(AVIOContext *s, int64_t offset);
* ftell() equivalent for AVIOContext.
* @return position or AVERROR.
*/
#define avio_tell(s) avio_seek((s), 0, SEEK_CUR)
static av_always_inline int64_t avio_tell(AVIOContext *s)
{
return avio_seek(s, 0, SEEK_CUR);
}
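Note: avio_tell() changes from a macro to a type-checked static inline function; callers are unaffected. A trivial sketch (illustrative helper name, not from the patch):

#include "libavformat/avio.h"

static int64_t current_offset(AVIOContext *pb)
{
    return avio_tell(pb);       /* same result as avio_seek(pb, 0, SEEK_CUR) */
}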
/**
* Get the filesize.
@ -508,9 +518,9 @@ int64_t av_url_read_fseek(AVIOContext *h, int stream_index,
/** @warning currently size is limited */
#ifdef __GNUC__
int url_fprintf(AVIOContext *s, const char *fmt, ...) __attribute__ ((__format__ (__printf__, 2, 3)));
int avio_printf(AVIOContext *s, const char *fmt, ...) __attribute__ ((__format__ (__printf__, 2, 3)));
#else
int url_fprintf(AVIOContext *s, const char *fmt, ...);
int avio_printf(AVIOContext *s, const char *fmt, ...);
#endif
#if FF_API_OLD_AVIO
@ -519,7 +529,7 @@ int url_fprintf(AVIOContext *s, const char *fmt, ...);
attribute_deprecated char *url_fgets(AVIOContext *s, char *buf, int buf_size);
#endif
void put_flush_packet(AVIOContext *s);
void avio_flush(AVIOContext *s);
/**
@ -570,8 +580,6 @@ unsigned int avio_rb24(AVIOContext *s);
unsigned int avio_rb32(AVIOContext *s);
uint64_t avio_rb64(AVIOContext *s);
uint64_t ff_get_v(AVIOContext *bc);
static inline int url_is_streamed(AVIOContext *s)
{
return s->is_streamed;
@ -590,8 +598,6 @@ static inline int url_is_streamed(AVIOContext *s)
*/
int url_fdopen(AVIOContext **s, URLContext *h);
/** @warning must be called before any I/O */
int url_setbufsize(AVIOContext *s, int buf_size);
#if FF_API_URL_RESETBUF
/** Reset the buffer for reading or writing.
* @note Will drop any data currently in the buffer without transmitting it.
@ -623,12 +629,12 @@ URLContext *url_fileno(AVIOContext *s);
* @deprecated use AVIOContext.max_packet_size directly.
*/
attribute_deprecated int url_fget_max_packet_size(AVIOContext *s);
#endif
int url_open_buf(AVIOContext **s, uint8_t *buf, int buf_size, int flags);
attribute_deprecated int url_open_buf(AVIOContext **s, uint8_t *buf, int buf_size, int flags);
/** return the written or read size */
int url_close_buf(AVIOContext *s);
attribute_deprecated int url_close_buf(AVIOContext *s);
#endif
/**
* Open a write only memory stream.

View File

@ -61,4 +61,9 @@ static av_always_inline void ffio_wfourcc(AVIOContext *pb, const uint8_t *s)
*/
int ffio_rewind_with_probe_data(AVIOContext *s, unsigned char *buf, int buf_size);
uint64_t ffio_read_varlen(AVIOContext *bc);
/** @warning must be called before any I/O */
int ffio_set_buf_size(AVIOContext *s, int buf_size);
#endif // AVFORMAT_AVIO_INTERNAL_H

View File

@ -170,7 +170,7 @@ void avio_write(AVIOContext *s, const unsigned char *buf, int size)
}
}
void put_flush_packet(AVIOContext *s)
void avio_flush(AVIOContext *s)
{
flush_buffer(s);
s->must_flush = 0;
@ -386,6 +386,26 @@ int64_t url_fsize(AVIOContext *s)
{
return avio_size(s);
}
int url_setbufsize(AVIOContext *s, int buf_size)
{
return ffio_set_buf_size(s, buf_size);
}
int url_fprintf(AVIOContext *s, const char *fmt, ...)
{
va_list ap;
char buf[4096];
int ret;
va_start(ap, fmt);
ret = vsnprintf(buf, sizeof(buf), fmt, ap);
va_end(ap);
avio_write(s, buf, strlen(buf));
return ret;
}
void put_flush_packet(AVIOContext *s)
{
avio_flush(s);
}
#endif
int avio_put_str(AVIOContext *s, const char *str)
@ -499,7 +519,7 @@ static void fill_buffer(AVIOContext *s)
/* make buffer smaller in case it ended up large after probing */
if (s->buffer_size > max_buffer_size) {
url_setbufsize(s, max_buffer_size);
ffio_set_buf_size(s, max_buffer_size);
s->checksum_ptr = dst = s->buffer;
len = s->buffer_size;
@ -762,7 +782,7 @@ uint64_t avio_rb64(AVIOContext *s)
return val;
}
uint64_t ff_get_v(AVIOContext *bc){
uint64_t ffio_read_varlen(AVIOContext *bc){
uint64_t val = 0;
int tmp;
@ -810,7 +830,7 @@ int url_fdopen(AVIOContext **s, URLContext *h)
return 0;
}
int url_setbufsize(AVIOContext *s, int buf_size)
int ffio_set_buf_size(AVIOContext *s, int buf_size)
{
uint8_t *buffer;
buffer = av_malloc(buf_size);
@ -917,8 +937,7 @@ URLContext *url_fileno(AVIOContext *s)
return s->opaque;
}
#if CONFIG_MUXERS
int url_fprintf(AVIOContext *s, const char *fmt, ...)
int avio_printf(AVIOContext *s, const char *fmt, ...)
{
va_list ap;
char buf[4096];
@ -930,7 +949,6 @@ int url_fprintf(AVIOContext *s, const char *fmt, ...)
avio_write(s, buf, strlen(buf));
return ret;
}
#endif //CONFIG_MUXERS
#if FF_API_OLD_AVIO
char *url_fgets(AVIOContext *s, char *buf, int buf_size)
@ -991,6 +1009,7 @@ int64_t av_url_read_fseek(AVIOContext *s, int stream_index,
* back to the server even if CONFIG_MUXERS is false. */
#if CONFIG_MUXERS || CONFIG_NETWORK
/* buffer handling */
#if FF_API_OLD_AVIO
int url_open_buf(AVIOContext **s, uint8_t *buf, int buf_size, int flags)
{
int ret;
@ -1007,9 +1026,10 @@ int url_open_buf(AVIOContext **s, uint8_t *buf, int buf_size, int flags)
int url_close_buf(AVIOContext *s)
{
put_flush_packet(s);
avio_flush(s);
return s->buf_ptr - s->buffer;
}
#endif
/* output in a dynamic buffer */
@ -1134,7 +1154,7 @@ int url_close_dyn_buf(AVIOContext *s, uint8_t **pbuffer)
padding = FF_INPUT_BUFFER_PADDING_SIZE;
}
put_flush_packet(s);
avio_flush(s);
*pbuffer = d->buffer;
size = d->size;

View File

@ -61,7 +61,7 @@ static int avs_read_header(AVFormatContext * s, AVFormatParameters * ap)
s->ctx_flags |= AVFMTCTX_NOHEADER;
avio_seek(s->pb, 4, SEEK_CUR);
avio_skip(s->pb, 4);
avs->width = avio_rl16(s->pb);
avs->height = avio_rl16(s->pb);
avs->bits_per_sample = avio_rl16(s->pb);
@ -204,7 +204,7 @@ static int avs_read_packet(AVFormatContext * s, AVPacket * pkt)
break;
default:
avio_seek(s->pb, size - 4, SEEK_CUR);
avio_skip(s->pb, size - 4);
}
}
}

View File

@ -67,7 +67,7 @@ static int vid_read_header(AVFormatContext *s,
* bytes: 'V' 'I' 'D'
* int16s: always_512, nframes, width, height, delay, always_14
*/
avio_seek(pb, 5, SEEK_CUR);
avio_skip(pb, 5);
vid->nframes = avio_rl16(pb);
stream = av_new_stream(s, 0);

View File

@ -65,19 +65,19 @@ static int bfi_read_header(AVFormatContext * s, AVFormatParameters * ap)
return AVERROR(ENOMEM);
/* Set the total number of frames. */
avio_seek(pb, 8, SEEK_CUR);
avio_skip(pb, 8);
chunk_header = avio_rl32(pb);
bfi->nframes = avio_rl32(pb);
avio_rl32(pb);
avio_rl32(pb);
avio_rl32(pb);
fps = avio_rl32(pb);
avio_seek(pb, 12, SEEK_CUR);
avio_skip(pb, 12);
vstream->codec->width = avio_rl32(pb);
vstream->codec->height = avio_rl32(pb);
/*Load the palette to extradata */
avio_seek(pb, 8, SEEK_CUR);
avio_skip(pb, 8);
vstream->codec->extradata = av_malloc(768);
vstream->codec->extradata_size = 768;
avio_read(pb, vstream->codec->extradata,

View File

@ -98,7 +98,7 @@ static int read_header(AVFormatContext *s, AVFormatParameters *ap)
return AVERROR(EIO);
}
avio_seek(pb, 4, SEEK_CUR);
avio_skip(pb, 4);
vst->codec->width = avio_rl32(pb);
vst->codec->height = avio_rl32(pb);
@ -127,7 +127,7 @@ static int read_header(AVFormatContext *s, AVFormatParameters *ap)
}
if (bink->num_audio_tracks) {
avio_seek(pb, 4 * bink->num_audio_tracks, SEEK_CUR);
avio_skip(pb, 4 * bink->num_audio_tracks);
for (i = 0; i < bink->num_audio_tracks; i++) {
ast = av_new_stream(s, 1);
@ -169,7 +169,7 @@ static int read_header(AVFormatContext *s, AVFormatParameters *ap)
keyframe ? AVINDEX_KEYFRAME : 0);
}
avio_seek(pb, 4, SEEK_CUR);
avio_skip(pb, 4);
bink->current_track = -1;
return 0;
@ -225,7 +225,7 @@ static int read_packet(AVFormatContext *s, AVPacket *pkt)
AV_RL32(pkt->data) / (2 * s->streams[bink->current_track]->codec->channels);
return 0;
} else {
avio_seek(pb, audio_size, SEEK_CUR);
avio_skip(pb, audio_size);
}
}

View File

@ -122,7 +122,7 @@ static int read_packet(AVFormatContext *s, AVPacket *pkt)
return AVERROR(ENOMEM);
c93->audio->codec->codec_type = AVMEDIA_TYPE_AUDIO;
}
avio_seek(pb, 26, SEEK_CUR); /* VOC header */
avio_skip(pb, 26); /* VOC header */
ret = voc_get_packet(s, pkt, c93->audio, datasize - 26);
if (ret > 0) {
pkt->stream_index = 1;

View File

@ -114,22 +114,22 @@ static int read_kuki_chunk(AVFormatContext *s, int64_t size)
av_log(s, AV_LOG_ERROR, "invalid AAC magic cookie\n");
return AVERROR_INVALIDDATA;
}
avio_seek(pb, skip, SEEK_CUR);
avio_skip(pb, skip);
} else if (st->codec->codec_id == CODEC_ID_ALAC) {
#define ALAC_PREAMBLE 12
#define ALAC_HEADER 36
if (size < ALAC_PREAMBLE + ALAC_HEADER) {
av_log(s, AV_LOG_ERROR, "invalid ALAC magic cookie\n");
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
return AVERROR_INVALIDDATA;
}
avio_seek(pb, ALAC_PREAMBLE, SEEK_CUR);
avio_skip(pb, ALAC_PREAMBLE);
st->codec->extradata = av_mallocz(ALAC_HEADER + FF_INPUT_BUFFER_PADDING_SIZE);
if (!st->codec->extradata)
return AVERROR(ENOMEM);
avio_read(pb, st->codec->extradata, ALAC_HEADER);
st->codec->extradata_size = ALAC_HEADER;
avio_seek(pb, size - ALAC_PREAMBLE - ALAC_HEADER, SEEK_CUR);
avio_skip(pb, size - ALAC_PREAMBLE - ALAC_HEADER);
} else {
st->codec->extradata = av_mallocz(size + FF_INPUT_BUFFER_PADDING_SIZE);
if (!st->codec->extradata)
@ -201,7 +201,7 @@ static int read_header(AVFormatContext *s,
int found_data, ret;
int64_t size;
avio_seek(pb, 8, SEEK_CUR); /* magic, version, file flags */
avio_skip(pb, 8); /* magic, version, file flags */
/* audio description chunk */
if (avio_rb32(pb) != MKBETAG('d','e','s','c')) {
@ -233,11 +233,11 @@ static int read_header(AVFormatContext *s,
switch (tag) {
case MKBETAG('d','a','t','a'):
avio_seek(pb, 4, SEEK_CUR); /* edit count */
avio_skip(pb, 4); /* edit count */
caf->data_start = avio_tell(pb);
caf->data_size = size < 0 ? -1 : size - 4;
if (caf->data_size > 0 && !url_is_streamed(pb))
avio_seek(pb, caf->data_size, SEEK_CUR);
avio_skip(pb, caf->data_size);
found_data = 1;
break;
@ -265,7 +265,7 @@ static int read_header(AVFormatContext *s,
case MKBETAG('f','r','e','e'):
if (size < 0)
return AVERROR_INVALIDDATA;
avio_seek(pb, size, SEEK_CUR);
avio_skip(pb, size);
break;
}
}

View File

@ -50,7 +50,7 @@ static int crc_write_trailer(struct AVFormatContext *s)
snprintf(buf, sizeof(buf), "CRC=0x%08x\n", crc->crcval);
avio_write(s->pb, buf, strlen(buf));
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}

View File

@ -60,7 +60,7 @@ static int daud_write_packet(struct AVFormatContext *s, AVPacket *pkt)
avio_wb16(s->pb, pkt->size);
avio_wb16(s->pb, 0x8010); // unknown
avio_write(s->pb, pkt->data, pkt->size);
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}

View File

@ -382,7 +382,7 @@ static int dv_write_packet(struct AVFormatContext *s, AVPacket *pkt)
pkt->data, pkt->size, &frame);
if (fsize > 0) {
avio_write(s->pb, frame, fsize);
put_flush_packet(s->pb);
avio_flush(s->pb);
}
return 0;
}

View File

@ -96,7 +96,7 @@ static int dxa_read_header(AVFormatContext *s, AVFormatParameters *ap)
c->has_sound = 1;
size = avio_rb32(pb);
c->vidpos = avio_tell(pb) + size;
avio_seek(pb, 16, SEEK_CUR);
avio_skip(pb, 16);
fsize = avio_rl32(pb);
ast = av_new_stream(s, 0);
@ -108,7 +108,7 @@ static int dxa_read_header(AVFormatContext *s, AVFormatParameters *ap)
tag = avio_rl32(pb);
fsize = avio_rl32(pb);
if(tag == MKTAG('d', 'a', 't', 'a')) break;
avio_seek(pb, fsize, SEEK_CUR);
avio_skip(pb, fsize);
}
c->bpc = (fsize + c->frames - 1) / c->frames;
if(ast->codec->block_align)

View File

@ -62,7 +62,7 @@ static int cdata_read_header(AVFormatContext *s, AVFormatParameters *ap)
};
sample_rate = avio_rb16(pb);
avio_seek(pb, 12, SEEK_CUR);
avio_skip(pb, 12);
st = av_new_stream(s, 0);
if (!st)

View File

@ -222,7 +222,7 @@ static int process_audio_header_eacs(AVFormatContext *s)
ea->bytes = avio_r8(pb); /* 1=8-bit, 2=16-bit */
ea->num_channels = avio_r8(pb);
compression_type = avio_r8(pb);
avio_seek(pb, 13, SEEK_CUR);
avio_skip(pb, 13);
switch (compression_type) {
case 0:
@ -261,7 +261,7 @@ static int process_video_header_mdec(AVFormatContext *s)
{
EaDemuxContext *ea = s->priv_data;
AVIOContext *pb = s->pb;
avio_seek(pb, 4, SEEK_CUR);
avio_skip(pb, 4);
ea->width = avio_rl16(pb);
ea->height = avio_rl16(pb);
ea->time_base = (AVRational){1,15};
@ -274,7 +274,7 @@ static int process_video_header_vp6(AVFormatContext *s)
EaDemuxContext *ea = s->priv_data;
AVIOContext *pb = s->pb;
avio_seek(pb, 16, SEEK_CUR);
avio_skip(pb, 16);
ea->time_base.den = avio_rl32(pb);
ea->time_base.num = avio_rl32(pb);
ea->video_codec = CODEC_ID_VP6;
@ -316,7 +316,7 @@ static int process_ea_header(AVFormatContext *s) {
case SHEN_TAG :
blockid = avio_rl32(pb);
if (blockid == GSTR_TAG) {
avio_seek(pb, 4, SEEK_CUR);
avio_skip(pb, 4);
} else if ((blockid & 0xFFFF)!=PT00_TAG) {
av_log (s, AV_LOG_ERROR, "unknown SCHl headerid\n");
return 0;
@ -474,19 +474,19 @@ static int ea_read_packet(AVFormatContext *s,
/* audio data */
case ISNh_TAG:
/* header chunk also contains data; skip over the header portion*/
avio_seek(pb, 32, SEEK_CUR);
avio_skip(pb, 32);
chunk_size -= 32;
case ISNd_TAG:
case SCDl_TAG:
case SNDC_TAG:
case SDEN_TAG:
if (!ea->audio_codec) {
avio_seek(pb, chunk_size, SEEK_CUR);
avio_skip(pb, chunk_size);
break;
} else if (ea->audio_codec == CODEC_ID_PCM_S16LE_PLANAR ||
ea->audio_codec == CODEC_ID_MP3) {
num_samples = avio_rl32(pb);
avio_seek(pb, 8, SEEK_CUR);
avio_skip(pb, 8);
chunk_size -= 12;
}
ret = av_get_packet(pb, pkt, chunk_size);
@ -541,7 +541,7 @@ static int ea_read_packet(AVFormatContext *s,
goto get_video_packet;
case mTCD_TAG:
avio_seek(pb, 8, SEEK_CUR); // skip ea dct header
avio_skip(pb, 8); // skip ea dct header
chunk_size -= 8;
goto get_video_packet;
@ -560,7 +560,7 @@ get_video_packet:
break;
default:
avio_seek(pb, chunk_size, SEEK_CUR);
avio_skip(pb, chunk_size);
break;
}
}

View File

@ -187,7 +187,7 @@ static int64_t get_dts(AVFormatContext *s, int64_t pos)
int64_t dts;
ffm_seek1(s, pos);
avio_seek(pb, 4, SEEK_CUR);
avio_skip(pb, 4);
dts = avio_rb64(pb);
#ifdef DEBUG_SEEK
av_log(s, AV_LOG_DEBUG, "dts=%0.6f\n", dts / 1000000.0);

View File

@ -44,7 +44,7 @@ static void flush_packet(AVFormatContext *s)
h |= 0x8000;
avio_wb16(pb, h);
avio_write(pb, ffm->packet, ffm->packet_end - ffm->packet);
put_flush_packet(pb);
avio_flush(pb);
/* prepare next packet */
ffm->frame_offset = 0; /* no key frame */
@ -187,7 +187,7 @@ static int ffm_write_header(AVFormatContext *s)
while ((avio_tell(pb) % ffm->packet_size) != 0)
avio_w8(pb, 0);
put_flush_packet(pb);
avio_flush(pb);
/* init packet mux */
ffm->packet_ptr = ffm->packet;
@ -235,7 +235,7 @@ static int ffm_write_trailer(AVFormatContext *s)
if (ffm->packet_ptr > ffm->packet)
flush_packet(s);
put_flush_packet(pb);
avio_flush(pb);
return 0;
}

View File

@ -53,7 +53,7 @@ static int write_header(AVFormatContext *s)
avio_write(s->pb, ID_STRING, sizeof(ID_STRING) - 1);
avio_w8(s->pb, '1'); // version
avio_w8(s->pb, '\n');
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}
@ -73,13 +73,13 @@ static int write_trailer(AVFormatContext *s)
AVChapter *ch = s->chapters[i];
avio_write(s->pb, ID_CHAPTER, sizeof(ID_CHAPTER) - 1);
avio_w8(s->pb, '\n');
url_fprintf(s->pb, "TIMEBASE=%d/%d\n", ch->time_base.num, ch->time_base.den);
url_fprintf(s->pb, "START=%"PRId64"\n", ch->start);
url_fprintf(s->pb, "END=%"PRId64"\n", ch->end);
avio_printf(s->pb, "TIMEBASE=%d/%d\n", ch->time_base.num, ch->time_base.den);
avio_printf(s->pb, "START=%"PRId64"\n", ch->start);
avio_printf(s->pb, "END=%"PRId64"\n", ch->end);
write_tags(s->pb, ch->metadata);
}
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}
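
With url_fprintf() replaced by avio_printf(), the chapter writer above still emits the same plain-text metadata file. For reference, assuming ID_STRING and ID_CHAPTER expand to ";FFMETADATA" and "[CHAPTER]" (their usual values, not shown in this hunk) and using illustrative chapter values, the output looks roughly like:

;FFMETADATA1
[CHAPTER]
TIMEBASE=1/1000
START=0
END=60000
title=Chapter One

The title= line stands in for whatever key=value pairs write_tags() emits from the chapter metadata.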

View File

@ -59,7 +59,7 @@ static int read_header(AVFormatContext *s,
return AVERROR_INVALIDDATA;
}
avio_seek(pb, 2, SEEK_CUR);
avio_skip(pb, 2);
st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
st->codec->codec_id = CODEC_ID_RAWVIDEO;
st->codec->pix_fmt = PIX_FMT_RGBA;
@ -84,7 +84,7 @@ static int read_packet(AVFormatContext *s,
return AVERROR(EIO);
pkt->dts = avio_tell(s->pb) / (st->codec->width * (st->codec->height + film->leading) * 4);
pkt->size = av_get_packet(s->pb, pkt, st->codec->width * st->codec->height * 4);
avio_seek(s->pb, st->codec->width * film->leading * 4, SEEK_CUR);
avio_skip(s->pb, st->codec->width * film->leading * 4);
if (pkt->size < 0)
return pkt->size;
pkt->flags |= AV_PKT_FLAG_KEY;

View File

@ -67,7 +67,7 @@ static int write_trailer(AVFormatContext *s)
avio_wb16(pb, 1/av_q2d(st->codec->time_base));
for (i = 0; i < 16; i++)
avio_w8(pb, 0x00); // reserved
put_flush_packet(pb);
avio_flush(pb);
return 0;
}

View File

@ -65,7 +65,7 @@ static int flac_read_header(AVFormatContext *s,
break;
/* skip metadata block for unsupported types */
default:
ret = avio_seek(s->pb, metadata_size, SEEK_CUR);
ret = avio_skip(s->pb, metadata_size);
if (ret < 0)
return ret;
}

View File

@ -104,7 +104,7 @@ static int flac_write_trailer(struct AVFormatContext *s)
avio_seek(pb, 8, SEEK_SET);
avio_write(pb, streaminfo, FLAC_STREAMINFO_SIZE);
avio_seek(pb, file_size, SEEK_SET);
put_flush_packet(pb);
avio_flush(pb);
} else {
av_log(s, AV_LOG_WARNING, "unable to rewrite FLAC header.\n");
}
@ -114,7 +114,7 @@ static int flac_write_trailer(struct AVFormatContext *s)
static int flac_write_packet(struct AVFormatContext *s, AVPacket *pkt)
{
avio_write(s->pb, pkt->data, pkt->size);
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}

View File

@ -239,7 +239,7 @@ static int flic_read_packet(AVFormatContext *s,
}
/* skip useless 10B sub-header (yes, it's not accounted for in the chunk header) */
avio_seek(pb, 10, SEEK_CUR);
avio_skip(pb, 10);
pkt->stream_index = flic->audio_stream_index;
pkt->pos = avio_tell(pb);
@ -253,7 +253,7 @@ static int flic_read_packet(AVFormatContext *s,
packet_read = 1;
} else {
/* not interested in this chunk */
avio_seek(pb, size - 6, SEEK_CUR);
avio_skip(pb, size - 6);
}
}

View File

@ -113,7 +113,7 @@ static int flv_set_video_codec(AVFormatContext *s, AVStream *vstream, int flv_co
static int amf_get_string(AVIOContext *ioc, char *buffer, int buffsize) {
int length = avio_rb16(ioc);
if(length >= buffsize) {
avio_seek(ioc, length, SEEK_CUR);
avio_skip(ioc, length);
return -1;
}
@ -149,7 +149,7 @@ static int amf_parse_object(AVFormatContext *s, AVStream *astream, AVStream *vst
unsigned int keylen;
while(avio_tell(ioc) < max_pos - 2 && (keylen = avio_rb16(ioc))) {
avio_seek(ioc, keylen, SEEK_CUR); //skip key string
avio_skip(ioc, keylen); //skip key string
if(amf_parse_object(s, NULL, NULL, NULL, max_pos, depth + 1) < 0)
return -1; //if we couldn't skip, bomb out.
}
@ -162,7 +162,7 @@ static int amf_parse_object(AVFormatContext *s, AVStream *astream, AVStream *vst
case AMF_DATA_TYPE_UNSUPPORTED:
break; //these take up no additional space
case AMF_DATA_TYPE_MIXEDARRAY:
avio_seek(ioc, 4, SEEK_CUR); //skip 32-bit max array index
avio_skip(ioc, 4); //skip 32-bit max array index
while(avio_tell(ioc) < max_pos - 2 && amf_get_string(ioc, str_val, sizeof(str_val)) > 0) {
//this is the only case in which we would want a nested parse to not skip over the object
if(amf_parse_object(s, astream, vstream, str_val, max_pos, depth + 1) < 0)
@ -182,7 +182,7 @@ static int amf_parse_object(AVFormatContext *s, AVStream *astream, AVStream *vst
}
break;
case AMF_DATA_TYPE_DATE:
avio_seek(ioc, 8 + 2, SEEK_CUR); //timestamp (double) and UTC offset (int16)
avio_skip(ioc, 8 + 2); //timestamp (double) and UTC offset (int16)
break;
default: //unsupported type, we couldn't skip
return -1;
@ -254,7 +254,7 @@ static int flv_read_header(AVFormatContext *s,
{
int offset, flags;
avio_seek(s->pb, 4, SEEK_CUR);
avio_skip(s->pb, 4);
flags = avio_r8(s->pb);
/* old flvtool cleared this field */
/* FIXME: better fix needed */
@ -278,7 +278,7 @@ static int flv_read_header(AVFormatContext *s,
offset = avio_rb32(s->pb);
avio_seek(s->pb, offset, SEEK_SET);
avio_seek(s->pb, 4, SEEK_CUR);
avio_skip(s->pb, 4);
s->start_time = 0;
@ -304,7 +304,7 @@ static int flv_read_packet(AVFormatContext *s, AVPacket *pkt)
int64_t dts, pts = AV_NOPTS_VALUE;
AVStream *st = NULL;
for(;;avio_seek(s->pb, 4, SEEK_CUR)){ /* pkt size is repeated at end. skip it */
for(;;avio_skip(s->pb, 4)){ /* pkt size is repeated at end. skip it */
pos = avio_tell(s->pb);
type = avio_r8(s->pb);
size = avio_rb24(s->pb);
@ -313,7 +313,7 @@ static int flv_read_packet(AVFormatContext *s, AVPacket *pkt)
// av_log(s, AV_LOG_DEBUG, "type:%d, size:%d, dts:%d\n", type, size, dts);
if (url_feof(s->pb))
return AVERROR_EOF;
avio_seek(s->pb, 3, SEEK_CUR); /* stream id, always 0 */
avio_skip(s->pb, 3); /* stream id, always 0 */
flags = 0;
if(size == 0)
@ -454,7 +454,7 @@ static int flv_read_packet(AVFormatContext *s, AVPacket *pkt)
pkt->flags |= AV_PKT_FLAG_KEY;
leave:
avio_seek(s->pb, 4, SEEK_CUR);
avio_skip(s->pb, 4);
return ret;
}

View File

@ -291,7 +291,7 @@ static int flv_write_header(AVFormatContext *s)
data_size= avio_tell(pb) - metadata_size_pos - 10;
avio_seek(pb, metadata_size_pos, SEEK_SET);
avio_wb24(pb, data_size);
avio_seek(pb, data_size + 10 - 3, SEEK_CUR);
avio_skip(pb, data_size + 10 - 3);
avio_wb32(pb, data_size + 11);
for (i = 0; i < s->nb_streams; i++) {
@ -318,7 +318,7 @@ static int flv_write_header(AVFormatContext *s)
data_size = avio_tell(pb) - pos;
avio_seek(pb, -data_size - 10, SEEK_CUR);
avio_wb24(pb, data_size);
avio_seek(pb, data_size + 10 - 3, SEEK_CUR);
avio_skip(pb, data_size + 10 - 3);
avio_wb32(pb, data_size + 11); // previous tag size
}
}
@ -430,7 +430,7 @@ static int flv_write_packet(AVFormatContext *s, AVPacket *pkt)
avio_wb32(pb,size+flags_size+11); // previous tag size
flv->duration = FFMAX(flv->duration, pkt->pts + flv->delay + pkt->duration);
put_flush_packet(pb);
avio_flush(pb);
av_free(data);

View File

@ -29,7 +29,7 @@ static int framecrc_write_packet(struct AVFormatContext *s, AVPacket *pkt)
snprintf(buf, sizeof(buf), "%d, %"PRId64", %d, 0x%08x\n", pkt->stream_index, pkt->dts, pkt->size, crc);
avio_write(s->pb, buf, strlen(buf));
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}

View File

@ -287,7 +287,7 @@ static int gif_write_header(AVFormatContext *s)
gif_image_write_header(pb, width, height, loop_count, NULL);
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}
@ -322,7 +322,7 @@ static int gif_write_video(AVFormatContext *s,
gif_image_write_image(pb, 0, 0, enc->width, enc->height,
buf, enc->width * 3, PIX_FMT_RGB24);
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}
@ -340,7 +340,7 @@ static int gif_write_trailer(AVFormatContext *s)
AVIOContext *pb = s->pb;
avio_w8(pb, 0x3b);
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}

View File

@ -174,7 +174,7 @@ static void gxf_material_tags(AVIOContext *pb, int *len, struct gxf_stream_info
else if (tag == MAT_LAST_FIELD)
si->last_field = value;
} else
avio_seek(pb, tlen, SEEK_CUR);
avio_skip(pb, tlen);
}
}
@ -223,7 +223,7 @@ static void gxf_track_tags(AVIOContext *pb, int *len, struct gxf_stream_info *si
else if (tag == TRACK_FPF && (value == 1 || value == 2))
si->fields_per_frame = value;
} else
avio_seek(pb, tlen, SEEK_CUR);
avio_skip(pb, tlen);
}
}
@ -238,7 +238,7 @@ static void gxf_read_index(AVFormatContext *s, int pkt_len) {
int i;
pkt_len -= 8;
if (s->flags & AVFMT_FLAG_IGNIDX) {
avio_seek(pb, pkt_len, SEEK_CUR);
avio_skip(pb, pkt_len);
return;
}
if (map_cnt > 1000) {
@ -247,7 +247,7 @@ static void gxf_read_index(AVFormatContext *s, int pkt_len) {
}
if (pkt_len < 4 * map_cnt) {
av_log(s, AV_LOG_ERROR, "invalid index length\n");
avio_seek(pb, pkt_len, SEEK_CUR);
avio_skip(pb, pkt_len);
return;
}
pkt_len -= 4 * map_cnt;
@ -255,7 +255,7 @@ static void gxf_read_index(AVFormatContext *s, int pkt_len) {
for (i = 0; i < map_cnt; i++)
av_add_index_entry(st, (uint64_t)avio_rl32(pb) * 1024,
i * (uint64_t)fields_per_map + 1, 0, 0, 0);
avio_seek(pb, pkt_len, SEEK_CUR);
avio_skip(pb, pkt_len);
}
static int gxf_header(AVFormatContext *s, AVFormatParameters *ap) {
@ -283,7 +283,7 @@ static int gxf_header(AVFormatContext *s, AVFormatParameters *ap) {
}
map_len -= len;
gxf_material_tags(pb, &len, &si);
avio_seek(pb, len, SEEK_CUR);
avio_skip(pb, len);
map_len -= 2;
len = avio_rb16(pb); // length of track description
if (len > map_len) {
@ -301,7 +301,7 @@ static int gxf_header(AVFormatContext *s, AVFormatParameters *ap) {
track_len = avio_rb16(pb);
len -= track_len;
gxf_track_tags(pb, &track_len, &si);
avio_seek(pb, track_len, SEEK_CUR);
avio_skip(pb, track_len);
if (!(track_type & 0x80)) {
av_log(s, AV_LOG_ERROR, "invalid track type %x\n", track_type);
continue;
@ -326,7 +326,7 @@ static int gxf_header(AVFormatContext *s, AVFormatParameters *ap) {
if (len < 0)
av_log(s, AV_LOG_ERROR, "invalid track description length specified\n");
if (map_len)
avio_seek(pb, map_len, SEEK_CUR);
avio_skip(pb, map_len);
if (!parse_packet_header(pb, &pkt_type, &len)) {
av_log(s, AV_LOG_ERROR, "sync lost in header\n");
return -1;
@ -342,8 +342,8 @@ static int gxf_header(AVFormatContext *s, AVFormatParameters *ap) {
if (len >= 0x39) {
AVRational fps;
len -= 0x39;
avio_seek(pb, 5, SEEK_CUR); // preamble
avio_seek(pb, 0x30, SEEK_CUR); // payload description
avio_skip(pb, 5); // preamble
avio_skip(pb, 0x30); // payload description
fps = fps_umf2avr(avio_rl32(pb));
if (!main_timebase.num || !main_timebase.den) {
// this may not always be correct, but simply the best we can get
@ -354,7 +354,7 @@ static int gxf_header(AVFormatContext *s, AVFormatParameters *ap) {
av_log(s, AV_LOG_INFO, "UMF packet too short\n");
} else
av_log(s, AV_LOG_INFO, "UMF packet missing\n");
avio_seek(pb, len, SEEK_CUR);
avio_skip(pb, len);
// set a fallback value, 60000/1001 is specified for audio-only files
// so use that regardless of why we do not know the video frame rate.
if (!main_timebase.num || !main_timebase.den)
@ -437,7 +437,7 @@ static int gxf_packet(AVFormatContext *s, AVPacket *pkt) {
continue;
}
if (pkt_type != PKT_MEDIA) {
avio_seek(pb, pkt_len, SEEK_CUR);
avio_skip(pb, pkt_len);
continue;
}
if (pkt_len < 16) {
@ -462,7 +462,7 @@ static int gxf_packet(AVFormatContext *s, AVPacket *pkt) {
int last = field_info & 0xffff; // last is exclusive
int bps = av_get_bits_per_sample(st->codec->codec_id)>>3;
if (first <= last && last*bps <= pkt_len) {
avio_seek(pb, first*bps, SEEK_CUR);
avio_skip(pb, first*bps);
skip = pkt_len - last*bps;
pkt_len = (last-first)*bps;
} else
@ -470,7 +470,7 @@ static int gxf_packet(AVFormatContext *s, AVPacket *pkt) {
}
ret = av_get_packet(pb, pkt, pkt_len);
if (skip)
avio_seek(pb, skip, SEEK_CUR);
avio_skip(pb, skip);
pkt->stream_index = stream_index;
pkt->dts = field_nr;
return ret;

View File

@ -753,7 +753,7 @@ static int gxf_write_header(AVFormatContext *s)
gxf->packet_count = 3;
put_flush_packet(pb);
avio_flush(pb);
return 0;
}
@ -781,12 +781,12 @@ static int gxf_write_trailer(AVFormatContext *s)
gxf_write_map_packet(s, 1);
gxf_write_flt_packet(s);
gxf_write_umf_packet(s);
put_flush_packet(pb);
avio_flush(pb);
/* update duration in all map packets */
for (i = 1; i < gxf->map_offsets_nb; i++) {
avio_seek(pb, gxf->map_offsets[i], SEEK_SET);
gxf_write_map_packet(s, 1);
put_flush_packet(pb);
avio_flush(pb);
}
avio_seek(pb, end, SEEK_SET);
@ -895,7 +895,7 @@ static int gxf_write_packet(AVFormatContext *s, AVPacket *pkt)
gxf->packet_count = 0;
}
put_flush_packet(pb);
avio_flush(pb);
return 0;
}

View File

@ -136,6 +136,52 @@ static void read_ttag(AVFormatContext *s, AVIOContext *pb, int taglen, const cha
av_metadata_set2(&s->metadata, key, val, AV_METADATA_DONT_OVERWRITE);
}
static int is_number(const char *str)
{
while (*str >= '0' && *str <= '9') str++;
return !*str;
}
static AVMetadataTag* get_date_tag(AVMetadata *m, const char *tag)
{
AVMetadataTag *t;
if ((t = av_metadata_get(m, tag, NULL, AV_METADATA_MATCH_CASE)) &&
strlen(t->value) == 4 && is_number(t->value))
return t;
return NULL;
}
static void merge_date(AVMetadata **m)
{
AVMetadataTag *t;
char date[17] = {0}; // YYYY-MM-DD hh:mm
if (!(t = get_date_tag(*m, "TYER")) &&
!(t = get_date_tag(*m, "TYE")))
return;
av_strlcpy(date, t->value, 5);
av_metadata_set2(m, "TYER", NULL, 0);
av_metadata_set2(m, "TYE", NULL, 0);
if (!(t = get_date_tag(*m, "TDAT")) &&
!(t = get_date_tag(*m, "TDA")))
goto finish;
snprintf(date + 4, sizeof(date) - 4, "-%.2s-%.2s", t->value + 2, t->value);
av_metadata_set2(m, "TDAT", NULL, 0);
av_metadata_set2(m, "TDA", NULL, 0);
if (!(t = get_date_tag(*m, "TIME")) &&
!(t = get_date_tag(*m, "TIM")))
goto finish;
snprintf(date + 10, sizeof(date) - 10, " %.2s:%.2s", t->value, t->value + 2);
av_metadata_set2(m, "TIME", NULL, 0);
av_metadata_set2(m, "TIM", NULL, 0);
finish:
if (date[0])
av_metadata_set2(m, "date", date, 0);
}
static void ff_id3v2_parse(AVFormatContext *s, int len, uint8_t version, uint8_t flags)
{
int isv34, unsync;
@ -172,7 +218,7 @@ static void ff_id3v2_parse(AVFormatContext *s, int len, uint8_t version, uint8_t
unsync = flags & 0x80;
if (isv34 && flags & 0x40) /* Extended header present, just skip over it */
avio_seek(s->pb, get_size(s->pb, 4), SEEK_CUR);
avio_skip(s->pb, get_size(s->pb, 4));
while (len >= taghdrlen) {
unsigned int tflags;
@ -208,7 +254,7 @@ static void ff_id3v2_parse(AVFormatContext *s, int len, uint8_t version, uint8_t
if (tflags & (ID3v2_FLAG_ENCRYPTION | ID3v2_FLAG_COMPRESSION)) {
av_log(s, AV_LOG_WARNING, "Skipping encrypted/compressed ID3v2 frame %s.\n", tag);
avio_seek(s->pb, tlen, SEEK_CUR);
avio_skip(s->pb, tlen);
} else if (tag[0] == 'T') {
if (unsync || tunsync) {
int i, j;
@ -229,7 +275,7 @@ static void ff_id3v2_parse(AVFormatContext *s, int len, uint8_t version, uint8_t
else if (!tag[0]) {
if (tag[1])
av_log(s, AV_LOG_WARNING, "invalid frame id, assuming padding");
avio_seek(s->pb, tlen, SEEK_CUR);
avio_skip(s->pb, tlen);
break;
}
/* Skip to end of tag */
@ -238,10 +284,10 @@ static void ff_id3v2_parse(AVFormatContext *s, int len, uint8_t version, uint8_t
if (len > 0) {
/* Skip padding */
avio_seek(s->pb, len, SEEK_CUR);
avio_skip(s->pb, len);
}
if (version == 4 && flags & 0x10) /* Footer preset, always 10 bytes, skip over it */
avio_seek(s->pb, 10, SEEK_CUR);
avio_skip(s->pb, 10);
av_free(buffer);
return;
@ -280,6 +326,7 @@ void ff_id3v2_read(AVFormatContext *s, const char *magic)
ff_metadata_conv(&s->metadata, NULL, ff_id3v2_34_metadata_conv);
ff_metadata_conv(&s->metadata, NULL, ff_id3v2_2_metadata_conv);
ff_metadata_conv(&s->metadata, NULL, ff_id3v2_4_metadata_conv);
merge_date(&s->metadata);
}
const AVMetadataConv ff_id3v2_34_metadata_conv[] = {
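
The merge_date() helper added in this file folds the numeric ID3v2.3 date frames (TYER/TYE year, TDAT/TDA day and month as DDMM, TIME/TIM as HHMM) into a single "date" metadata entry. A worked example of the string assembly it performs, with illustrative frame values not taken from this commit:

#include <stdio.h>
#include "libavutil/avstring.h"

/* Illustrative sketch of what merge_date() builds for
 * TYER="2011", TDAT="1703" (DDMM), TIME="1653" (HHMM). */
static void merge_date_example(void)
{
    char date[17] = {0};                     /* YYYY-MM-DD hh:mm */
    const char *tyer = "2011", *tdat = "1703", *tim = "1653";

    av_strlcpy(date, tyer, 5);                                            /* "2011"             */
    snprintf(date + 4,  sizeof(date) - 4,  "-%.2s-%.2s", tdat + 2, tdat); /* "2011-03-17"       */
    snprintf(date + 10, sizeof(date) - 10, " %.2s:%.2s", tim, tim + 2);   /* "2011-03-17 16:53" */
    /* merge_date() then stores this string under the "date" metadata key. */
}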

View File

@ -257,7 +257,7 @@ static int idcin_read_packet(AVFormatContext *s,
chunk_size = avio_rl32(pb);
/* skip the number of decoded bytes (always equal to width * height) */
avio_seek(pb, 4, SEEK_CUR);
avio_skip(pb, 4);
chunk_size -= 4;
ret= av_get_packet(pb, pkt, chunk_size);
if (ret < 0)

View File

@ -136,14 +136,14 @@ static int roq_read_packet(AVFormatContext *s,
break;
}
/* don't care about this chunk anymore */
avio_seek(pb, RoQ_CHUNK_PREAMBLE_SIZE, SEEK_CUR);
avio_skip(pb, RoQ_CHUNK_PREAMBLE_SIZE);
break;
case RoQ_QUAD_CODEBOOK:
/* packet needs to contain both this codebook and next VQ chunk */
codebook_offset = avio_tell(pb) - RoQ_CHUNK_PREAMBLE_SIZE;
codebook_size = chunk_size;
avio_seek(pb, codebook_size, SEEK_CUR);
avio_skip(pb, codebook_size);
if (avio_read(pb, preamble, RoQ_CHUNK_PREAMBLE_SIZE) !=
RoQ_CHUNK_PREAMBLE_SIZE)
return AVERROR(EIO);

View File

@ -30,7 +30,7 @@ static int roq_write_header(struct AVFormatContext *s)
};
avio_write(s->pb, header, 8);
put_flush_packet(s->pb);
avio_flush(s->pb);
return 0;
}

View File

@ -134,7 +134,7 @@ static int iff_read_header(AVFormatContext *s,
return AVERROR(ENOMEM);
st->codec->channels = 1;
avio_seek(pb, 8, SEEK_CUR);
avio_skip(pb, 8);
// codec_tag used by ByteRun1 decoder to distinguish progressive (PBM) and interlaced (ILBM) content
st->codec->codec_tag = avio_rl32(pb);
@ -152,10 +152,10 @@ static int iff_read_header(AVFormatContext *s,
if (data_size < 14)
return AVERROR_INVALIDDATA;
avio_seek(pb, 12, SEEK_CUR);
avio_skip(pb, 12);
st->codec->sample_rate = avio_rb16(pb);
if (data_size >= 16) {
avio_seek(pb, 1, SEEK_CUR);
avio_skip(pb, 1);
compression = avio_r8(pb);
}
break;
@ -186,14 +186,14 @@ static int iff_read_header(AVFormatContext *s,
return AVERROR_INVALIDDATA;
st->codec->width = avio_rb16(pb);
st->codec->height = avio_rb16(pb);
avio_seek(pb, 4, SEEK_CUR); // x, y offset
avio_skip(pb, 4); // x, y offset
st->codec->bits_per_coded_sample = avio_r8(pb);
if (data_size >= 11) {
avio_seek(pb, 1, SEEK_CUR); // masking
avio_skip(pb, 1); // masking
compression = avio_r8(pb);
}
if (data_size >= 16) {
avio_seek(pb, 3, SEEK_CUR); // padding, transparent
avio_skip(pb, 3); // padding, transparent
st->sample_aspect_ratio.num = avio_r8(pb);
st->sample_aspect_ratio.den = avio_r8(pb);
}
@ -223,7 +223,7 @@ static int iff_read_header(AVFormatContext *s,
return res;
}
}
avio_seek(pb, data_size - (avio_tell(pb) - orig_pos) + (data_size & 1), SEEK_CUR);
avio_skip(pb, data_size - (avio_tell(pb) - orig_pos) + (data_size & 1));
}
avio_seek(pb, iff->body_pos, SEEK_SET);

View File

@ -372,8 +372,8 @@ static int write_packet(AVFormatContext *s, AVPacket *pkt)
avio_write(pb[0], pkt->data , ysize);
avio_write(pb[1], pkt->data + ysize, (pkt->size - ysize)/2);
avio_write(pb[2], pkt->data + ysize +(pkt->size - ysize)/2, (pkt->size - ysize)/2);
put_flush_packet(pb[1]);
put_flush_packet(pb[2]);
avio_flush(pb[1]);
avio_flush(pb[2]);
avio_close(pb[1]);
avio_close(pb[2]);
}else{
@ -402,7 +402,7 @@ static int write_packet(AVFormatContext *s, AVPacket *pkt)
}
avio_write(pb[0], pkt->data, pkt->size);
}
put_flush_packet(pb[0]);
avio_flush(pb[0]);
if (!img->is_pipe) {
avio_close(pb[0]);
}

View File

@ -35,11 +35,11 @@ static int ingenient_read_packet(AVFormatContext *s, AVPacket *pkt)
w = avio_rl16(s->pb);
h = avio_rl16(s->pb);
avio_seek(s->pb, 8, SEEK_CUR); // zero + size (padded?)
avio_seek(s->pb, 2, SEEK_CUR);
avio_skip(s->pb, 8); // zero + size (padded?)
avio_skip(s->pb, 2);
unk1 = avio_rl16(s->pb);
unk2 = avio_rl16(s->pb);
avio_seek(s->pb, 22, SEEK_CUR); // ASCII timestamp
avio_skip(s->pb, 22); // ASCII timestamp
av_log(s, AV_LOG_DEBUG, "Ingenient packet: size=%d, width=%d, height=%d, unk1=%d unk2=%d\n",
size, w, h, unk1, unk2);

View File

@ -239,4 +239,15 @@ AVChapter *ff_new_chapter(AVFormatContext *s, int id, AVRational time_base,
*/
void ff_reduce_index(AVFormatContext *s, int stream_index);
/*
* Convert a relative url into an absolute url, given a base url.
*
* @param buf the buffer where output absolute url is written
* @param size the size of buf
* @param base the base url, may be equal to buf.
* @param rel the new url, which is interpreted relative to base
*/
void ff_make_absolute_url(char *buf, int size, const char *base,
const char *rel);
#endif /* AVFORMAT_INTERNAL_H */
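
A minimal usage sketch of the new ff_make_absolute_url() helper declared above; the URLs and the resolve_example() wrapper are illustrative, not from this commit:

#include "internal.h"

/* Illustrative sketch: resolve a relative segment name against a playlist URL. */
static void resolve_example(void)
{
    char abs_url[4096];

    ff_make_absolute_url(abs_url, sizeof(abs_url),
                         "http://example.com/live/playlist.m3u8",
                         "segment001.ts");
    /* abs_url now holds "http://example.com/live/segment001.ts";
     * per the comment above, base may also point into abs_url itself. */
}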

View File

@ -299,12 +299,12 @@ static int process_ipmovie_chunk(IPMVEContext *s, AVIOContext *pb,
case OPCODE_END_OF_STREAM:
debug_ipmovie("end of stream\n");
avio_seek(pb, opcode_size, SEEK_CUR);
avio_skip(pb, opcode_size);
break;
case OPCODE_END_OF_CHUNK:
debug_ipmovie("end of chunk\n");
avio_seek(pb, opcode_size, SEEK_CUR);
avio_skip(pb, opcode_size);
break;
case OPCODE_CREATE_TIMER:
@ -359,7 +359,7 @@ static int process_ipmovie_chunk(IPMVEContext *s, AVIOContext *pb,
case OPCODE_START_STOP_AUDIO:
debug_ipmovie("start/stop audio\n");
avio_seek(pb, opcode_size, SEEK_CUR);
avio_skip(pb, opcode_size);
break;
case OPCODE_INIT_VIDEO_BUFFERS:
@ -393,12 +393,12 @@ static int process_ipmovie_chunk(IPMVEContext *s, AVIOContext *pb,
case OPCODE_UNKNOWN_14:
case OPCODE_UNKNOWN_15:
debug_ipmovie("unknown (but documented) opcode %02X\n", opcode_type);
avio_seek(pb, opcode_size, SEEK_CUR);
avio_skip(pb, opcode_size);
break;
case OPCODE_SEND_BUFFER:
debug_ipmovie("send buffer\n");
avio_seek(pb, opcode_size, SEEK_CUR);
avio_skip(pb, opcode_size);
break;
case OPCODE_AUDIO_FRAME:
@ -407,22 +407,22 @@ static int process_ipmovie_chunk(IPMVEContext *s, AVIOContext *pb,
/* log position and move on for now */
s->audio_chunk_offset = avio_tell(pb);
s->audio_chunk_size = opcode_size;
avio_seek(pb, opcode_size, SEEK_CUR);
avio_skip(pb, opcode_size);
break;
case OPCODE_SILENCE_FRAME:
debug_ipmovie("silence frame\n");
avio_seek(pb, opcode_size, SEEK_CUR);
avio_skip(pb, opcode_size);
break;
case OPCODE_INIT_VIDEO_MODE:
debug_ipmovie("initialize video mode\n");
avio_seek(pb, opcode_size, SEEK_CUR);
avio_skip(pb, opcode_size);
break;
case OPCODE_CREATE_GRADIENT:
debug_ipmovie("create gradient\n");
avio_seek(pb, opcode_size, SEEK_CUR);
avio_skip(pb, opcode_size);
break;
case OPCODE_SET_PALETTE:
@ -464,7 +464,7 @@ static int process_ipmovie_chunk(IPMVEContext *s, AVIOContext *pb,
case OPCODE_SET_PALETTE_COMPRESSED:
debug_ipmovie("set palette compressed\n");
avio_seek(pb, opcode_size, SEEK_CUR);
avio_skip(pb, opcode_size);
break;
case OPCODE_SET_DECODING_MAP:
@ -473,7 +473,7 @@ static int process_ipmovie_chunk(IPMVEContext *s, AVIOContext *pb,
/* log position and move on for now */
s->decode_map_chunk_offset = avio_tell(pb);
s->decode_map_chunk_size = opcode_size;
avio_seek(pb, opcode_size, SEEK_CUR);
avio_skip(pb, opcode_size);
break;
case OPCODE_VIDEO_DATA:
@ -482,7 +482,7 @@ static int process_ipmovie_chunk(IPMVEContext *s, AVIOContext *pb,
/* log position and move on for now */
s->video_chunk_offset = avio_tell(pb);
s->video_chunk_size = opcode_size;
avio_seek(pb, opcode_size, SEEK_CUR);
avio_skip(pb, opcode_size);
break;
default:

Some files were not shown because too many files have changed in this diff Show More