Mirror of https://github.com/FFmpeg/FFmpeg.git, synced 2024-12-23 12:43:46 +02:00
fix some a/an typos
Signed-off-by: Lou Logan <lou@lrcd.com>
This commit is contained in:
parent 99f2a59c2f
commit 06eef96b69
@@ -1429,7 +1429,7 @@ PERL_PATH = /usr/bin/perl
 #---------------------------------------------------------------------------

 # If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
-# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base
+# generate an inheritance diagram (in HTML, RTF and LaTeX) for classes with base
 # or super classes. Setting the tag to NO turns the diagrams off. Note that
 # this option is superseded by the HAVE_DOT option below. This is only a
 # fallback. It is recommended to install and use dot, since it yields more
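For context, CLASS_DIAGRAMS only provides the built-in fallback diagrams; dot-based graphs take over when HAVE_DOT is enabled. A minimal, illustrative Doxyfile fragment (values are assumptions, not FFmpeg's actual configuration) might be:

    # Built-in class diagrams are only a fallback; prefer Graphviz dot.
    CLASS_DIAGRAMS = YES
    HAVE_DOT       = YES
    DOT_PATH       = /usr/bin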
@@ -257,7 +257,7 @@ Specify how strictly to follow the standards.
 Possible values:
 @table @samp
 @item very
-strictly conform to a older more strict version of the spec or reference software
+strictly conform to an older more strict version of the spec or reference software
 @item strict
 strictly conform to all the things in the spec no matter what consequences
 @item normal
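The levels documented above are selected with the -strict option on the command line; levels beyond the three shown here (for example experimental) are also accepted. A hedged example with invented file names:

    ffmpeg -strict experimental -i input.wav -c:a aac output.m4a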
@@ -65,7 +65,7 @@ static int open_input_file(const char *filename)
     /* select the audio stream */
     ret = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_AUDIO, -1, -1, &dec, 0);
     if (ret < 0) {
-        av_log(NULL, AV_LOG_ERROR, "Cannot find a audio stream in the input file\n");
+        av_log(NULL, AV_LOG_ERROR, "Cannot find an audio stream in the input file\n");
         return ret;
     }
     audio_stream_index = ret;
@@ -527,7 +527,7 @@ Wavelet Transform:
 ==================

 Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integer
-transform and a integer approximation of the symmetric biorthogonal 9/7
+transform and an integer approximation of the symmetric biorthogonal 9/7
 daubechies wavelet.

 2D IDWT (inverse discrete wavelet transform)
@@ -656,7 +656,7 @@ int configure_output_filter(FilterGraph *fg, OutputFilter *ofilter, AVFilterInOu
     DESCRIBE_FILTER_LINK(ofilter, out, 0);

     if (!ofilter->ost) {
-        av_log(NULL, AV_LOG_FATAL, "Filter %s has a unconnected output\n", ofilter->name);
+        av_log(NULL, AV_LOG_FATAL, "Filter %s has an unconnected output\n", ofilter->name);
         exit_program(1);
     }

@@ -111,7 +111,7 @@ ASSSplitContext *ff_ass_split(const char *buf);

 /**
  * Split one or several ASS "Dialogue" lines from a string buffer and store
- * them in a already initialized context.
+ * them in an already initialized context.
  *
  * @param ctx Context previously initialized by ff_ass_split().
  * @param buf String containing the ASS "Dialogue" lines.
@@ -91,7 +91,7 @@
  * details.
  *
  * If you add a codec ID to this list, add it so that
- * 1. no value of a existing codec ID changes (that would break ABI),
+ * 1. no value of an existing codec ID changes (that would break ABI),
  * 2. it is as close as possible to similar codecs
  *
  * After adding new codec IDs, do not forget to add an entry to the codec
@@ -167,7 +167,7 @@ static int build_table(VLC *vlc, int table_nb_bits, int nb_codes,
     int table_size, table_index, index, code_prefix, symbol, subtable_bits;
     int i, j, k, n, nb, inc;
     uint32_t code;
-    volatile VLC_TYPE (* volatile table)[2]; // the double volatile is needed to prevent a internal compiler error in gcc 4.2
+    volatile VLC_TYPE (* volatile table)[2]; // the double volatile is needed to prevent an internal compiler error in gcc 4.2

     table_size = 1 << table_nb_bits;
     if (table_nb_bits > 30)
@@ -134,7 +134,7 @@ static int cpia_decode_frame(AVCodecContext *avctx,
         v_end = v + frame->linesize[2] - 1;

         if ((i & 1) && header[17] == SUBSAMPLE_420) {
-            /* We are on a odd line and 420 subsample is used.
+            /* We are on an odd line and 420 subsample is used.
              * On this line only Y values are specified, one per pixel.
              */
             for (j = 0; j < linelength - 1; j++) {
@@ -167,7 +167,7 @@ static int h263_decode_gob_header(MpegEncContext *s)
         /* We have a GBSC probably with GSTUFF */
         skip_bits(&s->gb, 16); /* Drop the zeros */
         left= get_bits_left(&s->gb);
-        //MN: we must check the bits left or we might end in a infinite loop (or segfault)
+        //MN: we must check the bits left or we might end in an infinite loop (or segfault)
         for(;left>13; left--){
             if(get_bits1(&s->gb)) break; /* Seek the '1' bit */
         }
@@ -313,7 +313,7 @@ static int h263p_decode_umotion(MpegEncContext * s, int pred)
 }

 /**
- * read the next MVs for OBMC. yes this is a ugly hack, feel free to send a patch :)
+ * read the next MVs for OBMC. yes this is an ugly hack, feel free to send a patch :)
  */
 static void preview_obmc(MpegEncContext *s){
     GetBitContext gb= s->gb;
@@ -830,7 +830,7 @@ static void switch_buffer(MPADecodeContext *s, int *pos, int *end_pos,
             }
         }

-        /* Following is a optimized code for
+        /* Following is an optimized code for
             INTFLOAT v = *src
             if(get_bits1(&s->gb))
                 v = -v;
@@ -781,7 +781,7 @@ static int init_context_frame(MpegEncContext *s)
         s->dc_val_base[i] = 1024;
     }

-    /* which mb is a intra block */
+    /* which mb is an intra block */
     FF_ALLOCZ_OR_GOTO(s->avctx, s->mbintra_table, mb_array_size, fail);
     memset(s->mbintra_table, 1, mb_array_size);

@@ -1224,7 +1224,7 @@ int ff_mpv_frame_start(MpegEncContext *s, AVCodecContext *avctx)
     }

     if (s->current_picture_ptr && !s->current_picture_ptr->f->buf[0]) {
-        // we already have a unused image
+        // we already have an unused image
         // (maybe it was set before reading the header)
         pic = s->current_picture_ptr;
     } else {
@@ -354,7 +354,7 @@ static int parse_outputs(const char **buf, AVFilterInOut **curr_inputs,
             av_freep(&match);
             av_freep(&input);
         } else {
-            /* Not in the list, so add the first input as a open_output */
+            /* Not in the list, so add the first input as an open_output */
             input->name = name;
             insert_inout(open_outputs, input);
         }
@@ -38,7 +38,7 @@
  *
  * Only past frames are used, we should ideally use future frames too,
  * something like filtering the whole movie in forward and then
- * backward direction seems like a interesting idea but the current
+ * backward direction seems like an interesting idea but the current
  * filter framework is FAR from supporting such things.
  *
  * Combining the motion compensated image with the input image also is
@@ -828,7 +828,7 @@ static int asf_read_header(AVFormatContext *s)
         } else if (!ff_guidcmp(&g, &ff_asf_ext_stream_header)) {
             asf_read_ext_stream_properties(s, gsize);

-            // there could be a optional stream properties object to follow
+            // there could be an optional stream properties object to follow
             // if so the next iteration will pick it up
             continue;
         } else if (!ff_guidcmp(&g, &ff_asf_head1_guid)) {
@@ -2449,7 +2449,7 @@ int av_write_frame(AVFormatContext *s, AVPacket *pkt);
 int av_interleaved_write_frame(AVFormatContext *s, AVPacket *pkt);

 /**
- * Write a uncoded frame to an output media file.
+ * Write an uncoded frame to an output media file.
  *
  * The frame must be correctly interleaved according to the container
  * specification; if not, then av_interleaved_write_frame() must be used.
@@ -2460,7 +2460,7 @@ int av_write_uncoded_frame(AVFormatContext *s, int stream_index,
                            AVFrame *frame);

 /**
- * Write a uncoded frame to an output media file.
+ * Write an uncoded frame to an output media file.
  *
  * If the muxer supports it, this function makes it possible to write an AVFrame
  * structure directly, without encoding it into a packet.
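A rough sketch of how the declarations above might be used, assuming the muxer advertises support via av_write_uncoded_frame_query() and that oc, stream_index and frame are set up elsewhere (illustrative fragment, error handling trimmed):

    /* Ask the muxer whether it accepts uncoded frames for this stream. */
    if (av_write_uncoded_frame_query(oc, stream_index) >= 0) {
        /* The caller is responsible for correct interleaving here. */
        int ret = av_write_uncoded_frame(oc, stream_index, frame);
        if (ret < 0)
            av_log(NULL, AV_LOG_ERROR, "writing uncoded frame failed\n");
    }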
@@ -2523,7 +2523,7 @@ static int mov_write_edts_tag(AVIOContext *pb, MOVMuxContext *mov,
     } else {
         /* Avoid accidentally ending up with start_ct = -1 which has got a
          * special meaning. Normally start_ct should end up positive or zero
-         * here, but use FFMIN in case dts is a a small positive integer
+         * here, but use FFMIN in case dts is a small positive integer
          * rounded to 0 when represented in MOV_TIMESCALE units. */
        av_assert0(av_rescale_rnd(start_dts, MOV_TIMESCALE, track->timescale, AV_ROUND_DOWN) <= 0);
        start_ct = -FFMIN(start_dts, 0);
@@ -2608,7 +2608,7 @@ static int mxf_handle_missing_index_segment(MXFContext *mxf)
     if (mxf->op != OPAtom)
        return 0;

    /* TODO: support raw video without a index if they exist */
-    /* TODO: support raw video without a index if they exist */
+    /* TODO: support raw video without an index if they exist */
    if (s->nb_streams != 1 || s->streams[0]->codec->codec_type != AVMEDIA_TYPE_AUDIO || !is_pcm(s->streams[0]->codec->codec_id))
        return 0;

@@ -864,7 +864,7 @@ static int64_t ogg_read_timestamp(AVFormatContext *s, int stream_index,
            && !ogg_packet(s, &i, &pstart, &psize, pos_arg)) {
         if (i == stream_index) {
             struct ogg_stream *os = ogg->streams + stream_index;
-            // Do not trust the last timestamps of a ogm video
+            // Do not trust the last timestamps of an ogm video
             if ( (os->flags & OGG_FLAG_EOS)
                 && !(os->flags & OGG_FLAG_BOS)
                 && os->codec == &ff_ogm_video_codec)
@@ -3124,7 +3124,7 @@ void ff_rfps_calculate(AVFormatContext *ic)
         if (st->codec->codec_type != AVMEDIA_TYPE_VIDEO)
             continue;
         // the check for tb_unreliable() is not completely correct, since this is not about handling
-        // a unreliable/inexact time base, but a time base that is finer than necessary, as e.g.
+        // an unreliable/inexact time base, but a time base that is finer than necessary, as e.g.
         // ipmovie.c produces.
         if (tb_unreliable(st->codec) && st->info->duration_count > 15 && st->info->duration_gcd > FFMAX(1, st->time_base.den/(500LL*st->time_base.num)) && !st->r_frame_rate.num)
             av_reduce(&st->r_frame_rate.num, &st->r_frame_rate.den, st->time_base.den, st->time_base.num * st->info->duration_gcd, INT_MAX);
@@ -267,7 +267,7 @@ static int yuv4_write_header(AVFormatContext *s)
     case AV_PIX_FMT_YUV422P16:
     case AV_PIX_FMT_YUV444P16:
         if (s->strict_std_compliance >= FF_COMPLIANCE_NORMAL) {
-            av_log(s, AV_LOG_ERROR, "'%s' is not a official yuv4mpegpipe pixel format. "
+            av_log(s, AV_LOG_ERROR, "'%s' is not an official yuv4mpegpipe pixel format. "
                    "Use '-strict -1' to encode to this pixel format.\n",
                    av_get_pix_fmt_name(s->streams[0]->codec->pix_fmt));
             return AVERROR(EINVAL);
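Following the hint in the error message above, a hypothetical command that lowers the compliance level for this muxer could look like this (file names invented):

    ffmpeg -i input.mkv -pix_fmt yuv422p16le -strict -1 output.y4m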
@@ -156,7 +156,7 @@ static inline size_t av_strnlen(const char *s, size_t len)
 char *av_asprintf(const char *fmt, ...) av_printf_format(1, 2);

 /**
- * Convert a number to a av_malloced string.
+ * Convert a number to an av_malloced string.
  */
 char *av_d2str(double d);

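A small usage sketch for the declaration above: the returned string is av_malloc()ed, so the caller releases it with av_free().

    char *s = av_d2str(0.5);   /* yields something like "0.500000" */
    if (s) {
        puts(s);
        av_free(s);
    }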
@@ -70,7 +70,7 @@
 /**
  * Return the flags which specify extensions supported by the CPU.
  * The returned value is affected by av_force_cpu_flags() if that was used
- * before. So av_get_cpu_flags() can easily be used in a application to
+ * before. So av_get_cpu_flags() can easily be used in an application to
  * detect the enabled cpu flags.
  */
 int av_get_cpu_flags(void);
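A short illustrative fragment of the usage described above, checking one well-known flag:

    #include <stdio.h>
    #include "libavutil/cpu.h"

    int flags = av_get_cpu_flags();
    if (flags & AV_CPU_FLAG_SSE2)
        printf("SSE2 is available\n");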
@@ -120,8 +120,8 @@ void av_tree_destroy(struct AVTreeNode *t);
 /**
  * Apply enu(opaque, &elem) to all the elements in the tree in a given range.
  *
- * @param cmp a comparison function that returns < 0 for a element below the
- *            range, > 0 for a element above the range and == 0 for a
+ * @param cmp a comparison function that returns < 0 for an element below the
+ *            range, > 0 for an element above the range and == 0 for an
  *            element inside the range
  *
  * @note The cmp function should use the same ordering used to construct the
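To make the cmp contract above concrete, a hedged sketch of a range comparison over int elements (names invented for illustration):

    /* opaque points at int bounds[2] = { lo, hi }. */
    static int cmp_range(void *opaque, void *elem)
    {
        int  v = *(int *)elem;
        int *b = opaque;
        if (v < b[0]) return -1;  /* below the range  */
        if (v > b[1]) return  1;  /* above the range  */
        return 0;                 /* inside the range */
    }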
@@ -960,7 +960,7 @@ typedef struct SwsPlane
 } SwsPlane;

 /**
- * Struct which defines a slice of an image to be scaled or a output for
+ * Struct which defines a slice of an image to be scaled or an output for
  * a scaled slice.
  * A slice can also be used as intermediate ring buffer for scaling steps.
  */
@@ -36,7 +36,7 @@ I<path/to/dvd/structure> is the path to the DVD structure hierarchy; it
 normally contains a directory named B<VIDEO_TS>. It must not be encrypted
 with CSS.

-I<file.concat> is the output file. It can be used a input to ffmpeg.
+I<file.concat> is the output file. It can be used as an input to ffmpeg.
 It will require the B<-safe 0> option.

 =cut
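A possible end-to-end use of the script, assuming default options and invented paths:

    tools/dvd2concat path/to/dvd/structure > file.concat
    ffmpeg -safe 0 -i file.concat output.mkv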