
Merge remote-tracking branch 'qatar/master'

* qatar/master:
  vf_hqdn3d: Don't declare the loop variable within the for loop
  huffyuv: update to current coding style
  huffman: update to current coding style
  rtsp: Free the rtpdec context properly
  build: fft: x86: Drop unused YASM-OBJS-FFT- variable

Conflicts:
	libavcodec/huffman.c
	libavcodec/huffyuv.c
	libavcodec/x86/Makefile
	libavfilter/vf_hqdn3d.c

Merged-by: Michael Niedermayer <michaelni@gmx.at>
Committed by Michael Niedermayer on 2012-08-27 16:32:44 +02:00
commit 4abb88d7e8
5 changed files with 601 additions and 543 deletions

libavcodec/huffman.c

@@ -32,7 +32,9 @@
#define HNODE -1
static void get_tree_codes(uint32_t *bits, int16_t *lens, uint8_t *xlat, Node *nodes, int node, uint32_t pfx, int pl, int *pos, int no_zero_count)
static void get_tree_codes(uint32_t *bits, int16_t *lens, uint8_t *xlat,
Node *nodes, int node,
uint32_t pfx, int pl, int *pos, int no_zero_count)
{
int s;
@@ -45,11 +47,11 @@ static void get_tree_codes(uint32_t *bits, int16_t *lens, uint8_t *xlat, Node *n
} else {
pfx <<= 1;
pl++;
get_tree_codes(bits, lens, xlat, nodes, nodes[node].n0, pfx, pl, pos,
no_zero_count);
get_tree_codes(bits, lens, xlat, nodes, nodes[node].n0, pfx, pl,
pos, no_zero_count);
pfx |= 1;
get_tree_codes(bits, lens, xlat, nodes, nodes[node].n0+1, pfx, pl, pos,
no_zero_count);
get_tree_codes(bits, lens, xlat, nodes, nodes[node].n0 + 1, pfx, pl,
pos, no_zero_count);
}
}
@@ -61,7 +63,8 @@ static int build_huff_tree(VLC *vlc, Node *nodes, int head, int flags)
uint8_t xlat[256];
int pos = 0;
get_tree_codes(bits, lens, xlat, nodes, head, 0, 0, &pos, no_zero_count);
get_tree_codes(bits, lens, xlat, nodes, head, 0, 0,
&pos, no_zero_count);
return ff_init_vlc_sparse(vlc, 9, pos, lens, 2, 2, bits, 4, 4, xlat, 1, 1, 0);
}
@@ -84,7 +87,9 @@ int ff_huff_build_tree(AVCodecContext *avctx, VLC *vlc, int nb_codes,
}
if (sum >> 31) {
av_log(avctx, AV_LOG_ERROR, "Too high symbol frequencies. Tree construction is not possible\n");
av_log(avctx, AV_LOG_ERROR,
"Too high symbol frequencies. "
"Tree construction is not possible\n");
return -1;
}
qsort(nodes, nb_codes, sizeof(Node), cmp);

libavcodec/huffyuv.c

@@ -138,7 +138,9 @@ static const unsigned char classic_add_chroma[256] = {
6, 12, 8, 10, 7, 9, 6, 4, 6, 2, 2, 3, 3, 3, 3, 2,
};
static inline int sub_left_prediction(HYuvContext *s, uint8_t *dst, const uint8_t *src, int w, int left){
static inline int sub_left_prediction(HYuvContext *s, uint8_t *dst,
const uint8_t *src, int w, int left)
{
int i;
if (w < 32) {
for (i = 0; i < w; i++) {
@@ -158,7 +160,10 @@ static inline int sub_left_prediction(HYuvContext *s, uint8_t *dst, const uint8_
}
}
static inline void sub_left_prediction_bgr32(HYuvContext *s, uint8_t *dst, const uint8_t *src, int w, int *red, int *green, int *blue, int *alpha){
static inline void sub_left_prediction_bgr32(HYuvContext *s, uint8_t *dst,
const uint8_t *src, int w,
int *red, int *green, int *blue, int *alpha)
{
int i;
int r,g,b,a;
r = *red;
@@ -179,7 +184,9 @@ static inline void sub_left_prediction_bgr32(HYuvContext *s, uint8_t *dst, const
b = bt;
a = at;
}
s->dsp.diff_bytes(dst + 16, src + 16, src + 12, w * 4 - 16);
*red = src[(w - 1) * 4 + R];
*green = src[(w - 1) * 4 + G];
*blue = src[(w - 1) * 4 + B];
@@ -203,13 +210,16 @@ static inline void sub_left_prediction_rgb24(HYuvContext *s, uint8_t *dst, const
g = gt;
b = bt;
}
s->dsp.diff_bytes(dst + 48, src + 48, src + 48 - 3, w*3 - 48);
*red = src[(w - 1)*3 + 0];
*green = src[(w - 1)*3 + 1];
*blue = src[(w - 1)*3 + 2];
}
static int read_len_table(uint8_t *dst, GetBitContext *gb){
static int read_len_table(uint8_t *dst, GetBitContext *gb)
{
int i, val, repeat;
for (i = 0; i < 256;) {
@@ -217,7 +227,6 @@ static int read_len_table(uint8_t *dst, GetBitContext *gb){
val = get_bits(gb, 5);
if (repeat == 0)
repeat = get_bits(gb, 8);
//printf("%d %d\n", val, repeat);
if (i + repeat > 256 || get_bits_left(gb) < 0) {
av_log(NULL, AV_LOG_ERROR, "Error reading huffman table\n");
return -1;
@@ -228,7 +237,8 @@ static int read_len_table(uint8_t *dst, GetBitContext *gb){
return 0;
}
static int generate_bits_table(uint32_t *dst, const uint8_t *len_table){
static int generate_bits_table(uint32_t *dst, const uint8_t *len_table)
{
int len, index;
uint32_t bits = 0;
@@ -246,7 +256,8 @@ static int generate_bits_table(uint32_t *dst, const uint8_t *len_table){
return 0;
}
static void generate_joint_tables(HYuvContext *s){
static void generate_joint_tables(HYuvContext *s)
{
uint16_t symbols[1 << VLC_BITS];
uint16_t bits[1 << VLC_BITS];
uint8_t len[1 << VLC_BITS];
@@ -270,14 +281,15 @@ static void generate_joint_tables(HYuvContext *s){
}
}
ff_free_vlc(&s->vlc[3 + p]);
ff_init_vlc_sparse(&s->vlc[3+p], VLC_BITS, i, len, 1, 1, bits, 2, 2, symbols, 2, 2, 0);
ff_init_vlc_sparse(&s->vlc[3 + p], VLC_BITS, i, len, 1, 1,
bits, 2, 2, symbols, 2, 2, 0);
}
} else {
uint8_t (*map)[4] = (uint8_t(*)[4])s->pix_bgr_map;
int i, b, g, r, code;
int p0 = s->decorrelate;
int p1 = !s->decorrelate;
// restrict the range to +/-16 becaues that's pretty much guaranteed to
// restrict the range to +/-16 because that's pretty much guaranteed to
// cover all the combinations that fit in 11 bits total, and it doesn't
// matter if we miss a few rare codes.
for (i = 0, g = -16; g < 16; g++) {
@@ -315,7 +327,8 @@ static void generate_joint_tables(HYuvContext *s){
}
}
static int read_huffman_tables(HYuvContext *s, const uint8_t *src, int length){
static int read_huffman_tables(HYuvContext *s, const uint8_t *src, int length)
{
GetBitContext gb;
int i;
@@ -328,7 +341,8 @@ static int read_huffman_tables(HYuvContext *s, const uint8_t *src, int length){
return -1;
}
ff_free_vlc(&s->vlc[i]);
init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1, s->bits[i], 4, 4, 0);
init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1,
s->bits[i], 4, 4, 0);
}
generate_joint_tables(s);
@@ -336,14 +350,18 @@ static int read_huffman_tables(HYuvContext *s, const uint8_t *src, int length){
return (get_bits_count(&gb) + 7) / 8;
}
static int read_old_huffman_tables(HYuvContext *s){
static int read_old_huffman_tables(HYuvContext *s)
{
GetBitContext gb;
int i;
init_get_bits(&gb, classic_shift_luma, classic_shift_luma_table_size*8);
init_get_bits(&gb, classic_shift_luma,
classic_shift_luma_table_size * 8);
if (read_len_table(s->len[0], &gb) < 0)
return -1;
init_get_bits(&gb, classic_shift_chroma, classic_shift_chroma_table_size*8);
init_get_bits(&gb, classic_shift_chroma,
classic_shift_chroma_table_size * 8);
if (read_len_table(s->len[1], &gb) < 0)
return -1;
@@ -359,7 +377,8 @@ static int read_old_huffman_tables(HYuvContext *s){
for (i = 0; i < 3; i++) {
ff_free_vlc(&s->vlc[i]);
init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1, s->bits[i], 4, 4, 0);
init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1,
s->bits[i], 4, 4, 0);
}
generate_joint_tables(s);
@@ -367,7 +386,8 @@ static int read_old_huffman_tables(HYuvContext *s){
return 0;
}
static av_cold void alloc_temp(HYuvContext *s){
static av_cold void alloc_temp(HYuvContext *s)
{
int i;
if (s->bitstream_bpp<24) {
@@ -379,7 +399,8 @@ static av_cold void alloc_temp(HYuvContext *s){
}
}
static av_cold int common_init(AVCodecContext *avctx){
static av_cold int common_init(AVCodecContext *avctx)
{
HYuvContext *s = avctx->priv_data;
s->avctx = avctx;
@@ -407,10 +428,10 @@ static av_cold int decode_init(AVCodecContext *avctx)
s->interlaced = s->height > 288;
s->bgr32 = 1;
//if(avctx->extradata)
// printf("extradata:%X, extradata_size:%d\n", *(uint32_t*)avctx->extradata, avctx->extradata_size);
if (avctx->extradata_size) {
if((avctx->bits_per_coded_sample&7) && avctx->bits_per_coded_sample != 12)
if ((avctx->bits_per_coded_sample & 7) &&
avctx->bits_per_coded_sample != 12)
s->version = 1; // do such files exist at all?
else
s->version = 2;
@@ -433,7 +454,8 @@ s->bgr32=1;
s->interlaced = (interlace == 1) ? 1 : (interlace == 2) ? 0 : s->interlaced;
s->context = ((uint8_t*)avctx->extradata)[2] & 0x40 ? 1 : 0;
if(read_huffman_tables(s, ((uint8_t*)avctx->extradata)+4, avctx->extradata_size-4) < 0)
if ( read_huffman_tables(s, ((uint8_t*)avctx->extradata) + 4,
avctx->extradata_size - 4) < 0)
return -1;
}else{
switch (avctx->bits_per_coded_sample & 7) {
@@ -495,8 +517,6 @@ s->bgr32=1;
alloc_temp(s);
// av_log(NULL, AV_LOG_DEBUG, "pred:%d bpp:%d hbpp:%d il:%d\n", s->predictor, s->bitstream_bpp, avctx->bits_per_coded_sample, s->interlaced);
return 0;
}
@@ -512,7 +532,8 @@ static av_cold int decode_init_thread_copy(AVCodecContext *avctx)
s->vlc[i].table = NULL;
if (s->version == 2) {
if(read_huffman_tables(s, ((uint8_t*)avctx->extradata)+4, avctx->extradata_size) < 0)
if (read_huffman_tables(s, ((uint8_t*)avctx->extradata) + 4,
avctx->extradata_size) < 0)
return -1;
} else {
if (read_old_huffman_tables(s) < 0)
@@ -524,7 +545,8 @@ static av_cold int decode_init_thread_copy(AVCodecContext *avctx)
#endif /* CONFIG_HUFFYUV_DECODER || CONFIG_FFVHUFF_DECODER */
#if CONFIG_HUFFYUV_ENCODER || CONFIG_FFVHUFF_ENCODER
static int store_table(HYuvContext *s, const uint8_t *len, uint8_t *buf){
static int store_table(HYuvContext *s, const uint8_t *len, uint8_t *buf)
{
int i;
int index = 0;
@@ -586,26 +608,34 @@ static av_cold int encode_init(AVCodecContext *avctx)
if (avctx->context_model == 1) {
s->context = avctx->context_model;
if (s->flags & (CODEC_FLAG_PASS1|CODEC_FLAG_PASS2)) {
av_log(avctx, AV_LOG_ERROR, "context=1 is not compatible with 2 pass huffyuv encoding\n");
av_log(avctx, AV_LOG_ERROR,
"context=1 is not compatible with "
"2 pass huffyuv encoding\n");
return -1;
}
}else s->context= 0;
if (avctx->codec->id == AV_CODEC_ID_HUFFYUV) {
if (avctx->pix_fmt == PIX_FMT_YUV420P) {
av_log(avctx, AV_LOG_ERROR, "Error: YV12 is not supported by huffyuv; use vcodec=ffvhuff or format=422p\n");
av_log(avctx, AV_LOG_ERROR,
"Error: YV12 is not supported by huffyuv; use "
"vcodec=ffvhuff or format=422p\n");
return -1;
}
if (avctx->context_model) {
av_log(avctx, AV_LOG_ERROR, "Error: per-frame huffman tables are not supported by huffyuv; use vcodec=ffvhuff\n");
av_log(avctx, AV_LOG_ERROR,
"Error: per-frame huffman tables are not supported "
"by huffyuv; use vcodec=ffvhuff\n");
return -1;
}
if (s->interlaced != ( s->height > 288 ))
av_log(avctx, AV_LOG_INFO, "using huffyuv 2.2.0 or newer interlacing flag\n");
av_log(avctx, AV_LOG_INFO,
"using huffyuv 2.2.0 or newer interlacing flag\n");
}
if (s->bitstream_bpp >= 24 && s->predictor == MEDIAN) {
av_log(avctx, AV_LOG_ERROR, "Error: RGB is incompatible with median predictor\n");
av_log(avctx, AV_LOG_ERROR,
"Error: RGB is incompatible with median predictor\n");
return -1;
}
@@ -670,8 +700,6 @@ static av_cold int encode_init(AVCodecContext *avctx)
s->stats[i][j]= 0;
}
// printf("pred:%d bpp:%d hbpp:%d il:%d\n", s->predictor, s->bitstream_bpp, avctx->bits_per_coded_sample, s->interlaced);
alloc_temp(s);
s->picture_number=0;
@@ -693,7 +721,8 @@ static av_cold int encode_init(AVCodecContext *avctx)
}\
}
static void decode_422_bitstream(HYuvContext *s, int count){
static void decode_422_bitstream(HYuvContext *s, int count)
{
int i;
count /= 2;
@@ -711,7 +740,8 @@ static void decode_422_bitstream(HYuvContext *s, int count){
}
}
static void decode_gray_bitstream(HYuvContext *s, int count){
static void decode_gray_bitstream(HYuvContext *s, int count)
{
int i;
count/=2;
@@ -728,7 +758,8 @@ static void decode_gray_bitstream(HYuvContext *s, int count){
}
#if CONFIG_HUFFYUV_ENCODER || CONFIG_FFVHUFF_ENCODER
static int encode_422_bitstream(HYuvContext *s, int offset, int count){
static int encode_422_bitstream(HYuvContext *s, int offset, int count)
{
int i;
const uint8_t *y = s->temp[0] + offset;
const uint8_t *u = s->temp[1] + offset / 2;
@@ -746,6 +777,7 @@ static int encode_422_bitstream(HYuvContext *s, int offset, int count){
int v0 = v[i];
count /= 2;
if (s->flags & CODEC_FLAG_PASS1) {
for(i = 0; i < count; i++) {
LOAD4;
@@ -781,7 +813,8 @@ static int encode_422_bitstream(HYuvContext *s, int offset, int count){
return 0;
}
static int encode_gray_bitstream(HYuvContext *s, int count){
static int encode_gray_bitstream(HYuvContext *s, int count)
{
int i;
if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) < 4 * count) {
@@ -800,6 +833,7 @@ static int encode_gray_bitstream(HYuvContext *s, int count){
put_bits(&s->pb, s->len[0][y1], s->bits[0][y1]);
count /= 2;
if (s->flags & CODEC_FLAG_PASS1) {
for (i = 0; i < count; i++) {
LOAD2;
@@ -825,7 +859,9 @@ static int encode_gray_bitstream(HYuvContext *s, int count){
}
#endif /* CONFIG_HUFFYUV_ENCODER || CONFIG_FFVHUFF_ENCODER */
static av_always_inline void decode_bgr_1(HYuvContext *s, int count, int decorrelate, int alpha){
static av_always_inline void decode_bgr_1(HYuvContext *s, int count,
int decorrelate, int alpha)
{
int i;
for (i = 0; i < count; i++) {
int code = get_vlc2(&s->gb, s->vlc[3].table, VLC_BITS, 1);
@@ -833,8 +869,10 @@ static av_always_inline void decode_bgr_1(HYuvContext *s, int count, int decorre
*(uint32_t*)&s->temp[0][4 * i] = s->pix_bgr_map[code];
} else if(decorrelate) {
s->temp[0][4 * i + G] = get_vlc2(&s->gb, s->vlc[1].table, VLC_BITS, 3);
s->temp[0][4*i+B] = get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3) + s->temp[0][4*i+G];
s->temp[0][4*i+R] = get_vlc2(&s->gb, s->vlc[2].table, VLC_BITS, 3) + s->temp[0][4*i+G];
s->temp[0][4 * i + B] = get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3) +
s->temp[0][4 * i + G];
s->temp[0][4 * i + R] = get_vlc2(&s->gb, s->vlc[2].table, VLC_BITS, 3) +
s->temp[0][4 * i + G];
} else {
s->temp[0][4 * i + B] = get_vlc2(&s->gb, s->vlc[0].table, VLC_BITS, 3);
s->temp[0][4 * i + G] = get_vlc2(&s->gb, s->vlc[1].table, VLC_BITS, 3);
@@ -845,7 +883,8 @@ static av_always_inline void decode_bgr_1(HYuvContext *s, int count, int decorre
}
}
static void decode_bgr_bitstream(HYuvContext *s, int count){
static void decode_bgr_bitstream(HYuvContext *s, int count)
{
if (s->decorrelate) {
if (s->bitstream_bpp==24)
decode_bgr_1(s, count, 1, 0);
@@ -859,7 +898,8 @@ static void decode_bgr_bitstream(HYuvContext *s, int count){
}
}
static inline int encode_bgra_bitstream(HYuvContext *s, int count, int planes){
static inline int encode_bgra_bitstream(HYuvContext *s, int count, int planes)
{
int i;
if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb)>>3) < 4*planes*count) {
@ -883,7 +923,8 @@ static inline int encode_bgra_bitstream(HYuvContext *s, int count, int planes){
put_bits(&s->pb, s->len[2][r], s->bits[2][r]);\
if(planes==4) put_bits(&s->pb, s->len[2][a], s->bits[2][a]);
if((s->flags&CODEC_FLAG_PASS1) && (s->avctx->flags2&CODEC_FLAG2_NO_OUTPUT)){
if ((s->flags & CODEC_FLAG_PASS1) &&
(s->avctx->flags2 & CODEC_FLAG2_NO_OUTPUT)) {
for (i = 0; i < count; i++) {
LOAD3;
STAT3;
@@ -904,7 +945,8 @@ static inline int encode_bgra_bitstream(HYuvContext *s, int count, int planes){
}
#if CONFIG_HUFFYUV_DECODER || CONFIG_FFVHUFF_DECODER
static void draw_slice(HYuvContext *s, int y){
static void draw_slice(HYuvContext *s, int y)
{
int h, cy, i;
int offset[AV_NUM_DATA_POINTERS];
@@ -932,7 +974,9 @@ static void draw_slice(HYuvContext *s, int y){
s->last_slice_end = y + h;
}
static int decode_frame(AVCodecContext *avctx, void *data, int *data_size, AVPacket *avpkt){
static int decode_frame(AVCodecContext *avctx, void *data, int *data_size,
AVPacket *avpkt)
{
const uint8_t *buf = avpkt->data;
int buf_size = avpkt->size;
HYuvContext *s = avctx->priv_data;
@@ -945,12 +989,15 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *data_size, AVPac
AVFrame *picture = data;
av_fast_malloc(&s->bitstream_buffer, &s->bitstream_buffer_size, buf_size + FF_INPUT_BUFFER_PADDING_SIZE);
av_fast_malloc(&s->bitstream_buffer,
&s->bitstream_buffer_size,
buf_size + FF_INPUT_BUFFER_PADDING_SIZE);
if (!s->bitstream_buffer)
return AVERROR(ENOMEM);
memset(s->bitstream_buffer + buf_size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
s->dsp.bswap_buf((uint32_t*)s->bitstream_buffer, (const uint32_t*)buf, buf_size/4);
s->dsp.bswap_buf((uint32_t*)s->bitstream_buffer,
(const uint32_t*)buf, buf_size / 4);
if (p->data[0])
ff_thread_release_buffer(avctx, p);
@@ -970,7 +1017,8 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *data_size, AVPac
if ((unsigned)(buf_size-table_size) >= INT_MAX / 8)
return -1;
init_get_bits(&s->gb, s->bitstream_buffer+table_size, (buf_size-table_size)*8);
init_get_bits(&s->gb, s->bitstream_buffer+table_size,
(buf_size-table_size) * 8);
fake_ystride = s->interlaced ? p->linesize[0] * 2 : p->linesize[0];
fake_ustride = s->interlaced ? p->linesize[1] * 2 : p->linesize[1];
@@ -989,7 +1037,8 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *data_size, AVPac
p->data[0][1] = get_bits(&s->gb, 8);
p->data[0][0] = get_bits(&s->gb, 8);
av_log(avctx, AV_LOG_ERROR, "YUY2 output is not implemented yet\n");
av_log(avctx, AV_LOG_ERROR,
"YUY2 output is not implemented yet\n");
return -1;
} else {
@@ -1154,20 +1203,24 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *data_size, AVPac
s->dsp.add_hfyu_left_prediction_bgr32(p->data[0] + p->linesize[0]*y, s->temp[0], width, &leftr, &leftg, &leftb, &lefta);
if (s->predictor == PLANE) {
if (s->bitstream_bpp != 32) lefta = 0;
if((y&s->interlaced)==0 && y<s->height-1-s->interlaced){
if ((y & s->interlaced) == 0 &&
y < s->height - 1 - s->interlaced) {
s->dsp.add_bytes(p->data[0] + p->linesize[0] * y,
p->data[0] + p->linesize[0]*y + fake_ystride, fake_ystride);
p->data[0] + p->linesize[0] * y +
fake_ystride, fake_ystride);
}
}
}
draw_slice(s, height); // just 1 large slice as this is not possible in reverse order
// just 1 large slice as this is not possible in reverse order
draw_slice(s, height);
break;
default:
av_log(avctx, AV_LOG_ERROR, "prediction type not supported!\n");
av_log(avctx, AV_LOG_ERROR,
"prediction type not supported!\n");
}
}else{
av_log(avctx, AV_LOG_ERROR, "BGR24 output is not implemented yet\n");
av_log(avctx, AV_LOG_ERROR,
"BGR24 output is not implemented yet\n");
return -1;
}
}
@@ -1180,7 +1233,8 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *data_size, AVPac
}
#endif /* CONFIG_HUFFYUV_DECODER || CONFIG_FFVHUFF_DECODER */
static int common_end(HYuvContext *s){
static int common_end(HYuvContext *s)
{
int i;
for(i = 0; i < 3; i++) {
@@ -1245,7 +1299,8 @@ static int encode_frame(AVCodecContext *avctx, AVPacket *pkt,
init_put_bits(&s->pb, pkt->data + size, pkt->size - size);
if(avctx->pix_fmt == PIX_FMT_YUV422P || avctx->pix_fmt == PIX_FMT_YUV420P){
if (avctx->pix_fmt == PIX_FMT_YUV422P ||
avctx->pix_fmt == PIX_FMT_YUV420P) {
int lefty, leftu, leftv, y, cy;
put_bits(&s->pb, 8, leftv = p->data[2][0]);

libavcodec/x86/Makefile

@@ -42,11 +42,9 @@ YASM-OBJS-$(CONFIG_AAC_DECODER) += x86/sbrdsp.o
YASM-OBJS-$(CONFIG_AC3DSP) += x86/ac3dsp.o
YASM-OBJS-$(CONFIG_DCT) += x86/dct32_sse.o
YASM-OBJS-$(CONFIG_DIRAC_DECODER) += x86/diracdsp_mmx.o x86/diracdsp_yasm.o
YASM-OBJS-$(CONFIG_ENCODERS) += x86/dsputilenc.o
YASM-OBJS-$(CONFIG_FFT) += x86/fft_mmx.o \
$(YASM-OBJS-FFT-yes)
YASM-OBJS-$(CONFIG_DWT) += x86/dwt_yasm.o
YASM-OBJS-$(CONFIG_ENCODERS) += x86/dsputilenc.o
YASM-OBJS-$(CONFIG_FFT) += x86/fft_mmx.o
YASM-OBJS-$(CONFIG_H264CHROMA) += x86/h264_chromamc.o \
x86/h264_chromamc_10bit.o
YASM-OBJS-$(CONFIG_H264DSP) += x86/h264_deblock.o \

libavformat/rtsp.c

@@ -568,7 +568,7 @@ void ff_rtsp_undo_setup(AVFormatContext *s)
avformat_free_context(rtpctx);
} else if (rt->transport == RTSP_TRANSPORT_RDT && CONFIG_RTPDEC)
ff_rdt_parse_close(rtsp_st->transport_priv);
else if (rt->transport == RTSP_TRANSPORT_RAW && CONFIG_RTPDEC)
else if (rt->transport == RTSP_TRANSPORT_RTP && CONFIG_RTPDEC)
ff_rtp_parse_close(rtsp_st->transport_priv);
}
rtsp_st->transport_priv = NULL;