
rtpdec: pass an AVFormatContext to ff_parse_fmtp()

Use it for logging, instead of NULL or the stream codec context.
Anton Khirnov 2014-07-05 07:01:07 +00:00
parent 650d384048
commit 0307cc2253
8 changed files with 39 additions and 29 deletions
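
Before the per-file diffs, the shape of the change in one place: ff_parse_fmtp() now takes the demuxer's AVFormatContext and forwards it to the per-payload fmtp callback, which uses it as the logging context instead of NULL or the stream's codec context. Below is a minimal sketch of the resulting pattern for a hypothetical payload handler (illustrative only, not part of this commit; the foo_* names and the "bitrate" attribute are assumptions, layered on the internal rtpdec_formats.h declarations):

#include <stdlib.h>
#include <string.h>

#include "libavutil/avstring.h"
#include "rtpdec_formats.h"

/* Hypothetical fmtp callback: the AVFormatContext passed in by
 * ff_parse_fmtp() is now used as the logging context. */
static int foo_parse_fmtp(AVFormatContext *s, AVStream *stream,
                          PayloadContext *data, char *attr, char *value)
{
    if (!strcmp(attr, "bitrate")) {
        stream->codec->bit_rate = atoi(value);
    } else {
        av_log(s, AV_LOG_WARNING, "Ignoring fmtp attribute %s\n", attr);
    }
    return 0;
}

/* SDP line hook: the format context is handed to ff_parse_fmtp()
 * together with the stream and the payload context. */
static int foo_parse_sdp_line(AVFormatContext *s, int st_index,
                              PayloadContext *data, const char *line)
{
    const char *p;

    if (st_index < 0)
        return 0;
    if (av_strstart(line, "fmtp:", &p))
        return ff_parse_fmtp(s, s->streams[st_index], data, p,
                             foo_parse_fmtp);
    return 0;
}

Every handler touched below follows the same pattern: the callback signature grows by the AVFormatContext, and the ff_parse_fmtp() call site forwards s.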

libavformat/rtpdec.c

@@ -826,8 +826,10 @@ void ff_rtp_parse_close(RTPDemuxContext *s)
     av_free(s);
 }
 
-int ff_parse_fmtp(AVStream *stream, PayloadContext *data, const char *p,
-                  int (*parse_fmtp)(AVStream *stream,
+int ff_parse_fmtp(AVFormatContext *s,
+                  AVStream *stream, PayloadContext *data, const char *p,
+                  int (*parse_fmtp)(AVFormatContext *s,
+                                    AVStream *stream,
                                     PayloadContext *data,
                                     char *attr, char *value))
 {
@@ -852,7 +854,7 @@ int ff_parse_fmtp(AVStream *stream, PayloadContext *data, const char *p,
     while (ff_rtsp_next_attr_and_value(&p,
                                        attr, sizeof(attr),
                                        value, value_size)) {
-        res = parse_fmtp(stream, data, attr, value);
+        res = parse_fmtp(s, stream, data, attr, value);
         if (res < 0 && res != AVERROR_PATCHWELCOME) {
             av_free(value);
             return res;

libavformat/rtpdec.h

@@ -200,8 +200,10 @@ RTPDynamicProtocolHandler *ff_rtp_handler_find_by_id(int id,
 int ff_rtsp_next_attr_and_value(const char **p, char *attr, int attr_size,
                                 char *value, int value_size);
 
-int ff_parse_fmtp(AVStream *stream, PayloadContext *data, const char *p,
-                  int (*parse_fmtp)(AVStream *stream,
+int ff_parse_fmtp(AVFormatContext *s,
+                  AVStream *stream, PayloadContext *data, const char *p,
+                  int (*parse_fmtp)(AVFormatContext *s,
+                                    AVStream *stream,
                                     PayloadContext *data,
                                     char *attr, char *value));
 

libavformat/rtpdec_amr.c

@@ -139,7 +139,8 @@ static int amr_handle_packet(AVFormatContext *ctx, PayloadContext *data,
     return 0;
 }
 
-static int amr_parse_fmtp(AVStream *stream, PayloadContext *data,
+static int amr_parse_fmtp(AVFormatContext *s,
+                          AVStream *stream, PayloadContext *data,
                           char *attr, char *value)
 {
     /* Some AMR SDP configurations contain "octet-align", without
@@ -147,8 +148,8 @@ static int amr_parse_fmtp(AVStream *stream, PayloadContext *data,
      * interpret it as "1".
      */
     if (!strcmp(value, "")) {
-        av_log(NULL, AV_LOG_WARNING, "AMR fmtp attribute %s had "
+        av_log(s, AV_LOG_WARNING, "AMR fmtp attribute %s had "
               "nonstandard empty value\n", attr);
         strcpy(value, "1");
     }
     if (!strcmp(attr, "octet-align"))
@@ -177,7 +178,7 @@ static int amr_parse_sdp_line(AVFormatContext *s, int st_index,
      * separated key/value pairs.
      */
     if (av_strstart(line, "fmtp:", &p)) {
-        ret = ff_parse_fmtp(s->streams[st_index], data, p, amr_parse_fmtp);
+        ret = ff_parse_fmtp(s, s->streams[st_index], data, p, amr_parse_fmtp);
         if (!data->octet_align || data->crc ||
             data->interleaving || data->channels != 1) {
             av_log(s, AV_LOG_ERROR, "Unsupported RTP/AMR configuration!\n");

libavformat/rtpdec_h264.c

@@ -64,7 +64,8 @@ struct PayloadContext {
 
 static const uint8_t start_sequence[] = { 0, 0, 0, 1 };
 
-static int sdp_parse_fmtp_config_h264(AVStream *stream,
+static int sdp_parse_fmtp_config_h264(AVFormatContext *s,
+                                      AVStream *stream,
                                       PayloadContext *h264_data,
                                       char *attr, char *value)
 {
@@ -73,7 +74,7 @@ static int sdp_parse_fmtp_config_h264(AVStream *stream,
     assert(h264_data != NULL);
 
     if (!strcmp(attr, "packetization-mode")) {
-        av_log(codec, AV_LOG_DEBUG, "RTP Packetization Mode: %d\n", atoi(value));
+        av_log(s, AV_LOG_DEBUG, "RTP Packetization Mode: %d\n", atoi(value));
         h264_data->packetization_mode = atoi(value);
         /*
          * Packetization Mode:
@@ -83,7 +84,7 @@ static int sdp_parse_fmtp_config_h264(AVStream *stream,
          * and 29 (FU-B) are allowed.
          */
         if (h264_data->packetization_mode > 1)
-            av_log(codec, AV_LOG_ERROR,
+            av_log(s, AV_LOG_ERROR,
                    "Interleaved RTP mode is not supported yet.\n");
     } else if (!strcmp(attr, "profile-level-id")) {
         if (strlen(value) == 6) {
@@ -104,7 +105,7 @@ static int sdp_parse_fmtp_config_h264(AVStream *stream,
             buffer[1] = value[5];
             level_idc = strtol(buffer, NULL, 16);
 
-            av_log(codec, AV_LOG_DEBUG,
+            av_log(s, AV_LOG_DEBUG,
                    "RTP Profile IDC: %x Profile IOP: %x Level: %x\n",
                    profile_idc, profile_iop, level_idc);
             h264_data->profile_idc = profile_idc;
@@ -137,7 +138,7 @@ static int sdp_parse_fmtp_config_h264(AVStream *stream,
                                            codec->extradata_size +
                                            FF_INPUT_BUFFER_PADDING_SIZE);
                 if (!dest) {
-                    av_log(codec, AV_LOG_ERROR,
+                    av_log(s, AV_LOG_ERROR,
                            "Unable to allocate memory for extradata!\n");
                     return AVERROR(ENOMEM);
                 }
@@ -157,7 +158,7 @@ static int sdp_parse_fmtp_config_h264(AVStream *stream,
                 codec->extradata_size += sizeof(start_sequence) + packet_size;
             }
         }
-        av_log(codec, AV_LOG_DEBUG, "Extradata set to %p (size: %d)!\n",
+        av_log(s, AV_LOG_DEBUG, "Extradata set to %p (size: %d)!\n",
                codec->extradata, codec->extradata_size);
     }
     return 0;
@@ -385,7 +386,7 @@ static int parse_h264_sdp_line(AVFormatContext *s, int st_index,
         codec->width = atoi(buf1);
         codec->height = atoi(p + 1); // skip the -
     } else if (av_strstart(p, "fmtp:", &p)) {
-        return ff_parse_fmtp(stream, h264_data, p, sdp_parse_fmtp_config_h264);
+        return ff_parse_fmtp(s, stream, h264_data, p, sdp_parse_fmtp_config_h264);
     } else if (av_strstart(p, "cliprect:", &p)) {
         // could use this if we wanted.
     }

libavformat/rtpdec_ilbc.c

@@ -23,7 +23,8 @@
 #include "rtpdec_formats.h"
 #include "libavutil/avstring.h"
 
-static int ilbc_parse_fmtp(AVStream *stream, PayloadContext *data,
+static int ilbc_parse_fmtp(AVFormatContext *s,
+                           AVStream *stream, PayloadContext *data,
                            char *attr, char *value)
 {
     if (!strcmp(attr, "mode")) {
@@ -36,7 +37,7 @@ static int ilbc_parse_fmtp(AVStream *stream, PayloadContext *data,
             stream->codec->block_align = 50;
             break;
         default:
-            av_log(NULL, AV_LOG_ERROR, "Unsupported iLBC mode %d\n", mode);
+            av_log(s, AV_LOG_ERROR, "Unsupported iLBC mode %d\n", mode);
             return AVERROR(EINVAL);
         }
     }
@@ -54,7 +55,7 @@ static int ilbc_parse_sdp_line(AVFormatContext *s, int st_index,
     st = s->streams[st_index];
 
     if (av_strstart(line, "fmtp:", &p)) {
-        int ret = ff_parse_fmtp(st, data, p, ilbc_parse_fmtp);
+        int ret = ff_parse_fmtp(s, st, data, p, ilbc_parse_fmtp);
         if (ret < 0)
             return ret;
         if (!st->codec->block_align) {

libavformat/rtpdec_latm.c

@@ -145,7 +145,8 @@ end:
     return ret;
 }
 
-static int parse_fmtp(AVStream *stream, PayloadContext *data,
+static int parse_fmtp(AVFormatContext *s,
+                      AVStream *stream, PayloadContext *data,
                       char *attr, char *value)
 {
     int res;
@@ -157,7 +158,7 @@ static int parse_fmtp(AVStream *stream, PayloadContext *data,
     } else if (!strcmp(attr, "cpresent")) {
         int cpresent = atoi(value);
         if (cpresent != 0)
-            avpriv_request_sample(NULL,
+            avpriv_request_sample(s,
                                   "RTP MP4A-LATM with in-band configuration");
     }
 
@@ -173,7 +174,7 @@ static int latm_parse_sdp_line(AVFormatContext *s, int st_index,
         return 0;
 
     if (av_strstart(line, "fmtp:", &p))
-        return ff_parse_fmtp(s->streams[st_index], data, p, parse_fmtp);
+        return ff_parse_fmtp(s, s->streams[st_index], data, p, parse_fmtp);
 
     return 0;
 }

libavformat/rtpdec_mpeg4.c

@@ -210,7 +210,8 @@ static int aac_parse_packet(AVFormatContext *ctx, PayloadContext *data,
     return 0;
 }
 
-static int parse_fmtp(AVStream *stream, PayloadContext *data,
+static int parse_fmtp(AVFormatContext *s,
+                      AVStream *stream, PayloadContext *data,
                       char *attr, char *value)
 {
     AVCodecContext *codec = stream->codec;
@@ -248,7 +249,7 @@ static int parse_sdp_line(AVFormatContext *s, int st_index,
         return 0;
 
     if (av_strstart(line, "fmtp:", &p))
-        return ff_parse_fmtp(s->streams[st_index], data, p, parse_fmtp);
+        return ff_parse_fmtp(s, s->streams[st_index], data, p, parse_fmtp);
 
     return 0;
 }

libavformat/rtpdec_xiph.c

@@ -306,7 +306,8 @@ parse_packed_headers(const uint8_t * packed_headers,
     return 0;
 }
 
-static int xiph_parse_fmtp_pair(AVStream* stream,
+static int xiph_parse_fmtp_pair(AVFormatContext *s,
+                                AVStream* stream,
                                 PayloadContext *xiph_data,
                                 char *attr, char *value)
 {
@@ -321,7 +322,7 @@ static int xiph_parse_fmtp_pair(AVStream* stream,
         } else if (!strcmp(value, "YCbCr-4:4:4")) {
             codec->pix_fmt = AV_PIX_FMT_YUV444P;
         } else {
-            av_log(codec, AV_LOG_ERROR,
+            av_log(s, AV_LOG_ERROR,
                    "Unsupported pixel format %s\n", attr);
             return AVERROR_INVALIDDATA;
         }
@@ -360,12 +361,12 @@ static int xiph_parse_fmtp_pair(AVStream* stream,
                     (decoded_packet, decoded_packet + packet_size, codec,
                     xiph_data);
             } else {
-                av_log(codec, AV_LOG_ERROR,
+                av_log(s, AV_LOG_ERROR,
                        "Out of memory while decoding SDP configuration.\n");
                 result = AVERROR(ENOMEM);
             }
         } else {
-            av_log(codec, AV_LOG_ERROR, "Packet too large\n");
+            av_log(s, AV_LOG_ERROR, "Packet too large\n");
             result = AVERROR_INVALIDDATA;
         }
         av_free(decoded_packet);
@@ -382,7 +383,7 @@ static int xiph_parse_sdp_line(AVFormatContext *s, int st_index,
         return 0;
 
     if (av_strstart(line, "fmtp:", &p)) {
-        return ff_parse_fmtp(s->streams[st_index], data, p,
+        return ff_parse_fmtp(s, s->streams[st_index], data, p,
                              xiph_parse_fmtp_pair);
     }
 