FFmpeg/libavformat/rtpenc_chain.c
Martin Storsjö 4a4a7e138c rtpenc_chain: Use the original AVFormatContext for getting payload type
In ff_rtp_get_payload_type, the AVFormatContext is used for checking
whether the payload_type or rtpflags options are set. In rtpenc_chain,
the rtpctx struct is a newly initialized struct where no options have
been set yet, so no options can be fetched from there.
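
As a simplified sketch (not the actual libavformat code), the lookup only sees options already set on the context it is handed, which is why a freshly allocated context yields nothing; the helper name below is invented for illustration and assumes the "payload_type" private option mentioned above:

#include "avformat.h"
#include "libavutil/opt.h"

/* Simplified sketch: only options already set on the context passed in
 * can be read back. */
static int payload_type_from_opts(AVFormatContext *fmt)
{
    int64_t payload_type = -1;

    if (fmt && fmt->oformat && fmt->oformat->priv_class && fmt->priv_data)
        av_opt_get_int(fmt->priv_data, "payload_type", 0, &payload_type);

    /* A freshly allocated context (like rtpctx in rtpenc_chain) has no
     * private options set yet, so nothing useful comes back. */
    return payload_type < 0 ? -1 : (int)payload_type;
}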

All muxers that internally chain rtp muxers have the "rtpflags" field
that allows passing such options on (which is how this worked before
8034130e06), so this works just as intended.
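
For illustration, such a field is exposed through an AVOption entry in the chaining muxer's private option table; the struct, option table and flag value below are invented for this sketch, but this is the shape that lets av_opt_get(s, "rtpflags", AV_OPT_SEARCH_CHILDREN, ...) in the code below find a value to forward:

#include <limits.h>
#include <stddef.h>
#include "libavutil/log.h"
#include "libavutil/opt.h"

/* Illustrative only: a chaining muxer's private context and option table
 * exposing "rtpflags"; names and the flag value are placeholders. */
typedef struct ChainingMuxContext {
    const AVClass *av_class;     /* required so the av_opt_*() API can inspect this */
    int rtp_muxer_flags;         /* forwarded to the chained RTP muxer */
} ChainingMuxContext;

static const AVOption chaining_mux_options[] = {
    { "rtpflags", "RTP muxer flags", offsetof(ChainingMuxContext, rtp_muxer_flags),
      AV_OPT_TYPE_FLAGS, { .i64 = 0 }, INT_MIN, INT_MAX,
      AV_OPT_FLAG_ENCODING_PARAM, "rtpflags" },
    { "rfc2190", "Use RFC 2190 packetization for H.263", 0,
      AV_OPT_TYPE_CONST, { .i64 = 1 /* placeholder flag value */ }, INT_MIN, INT_MAX,
      AV_OPT_FLAG_ENCODING_PARAM, "rtpflags" },
    { NULL },
};

With an option table like this attached to the muxer's AVClass, av_opt_get() with AV_OPT_SEARCH_CHILDREN can read the value back from s as a string, and rtpenc_chain can pass it on to the chained muxer through its options dictionary, which is what the code below does.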

This makes it possible to produce H263 in RFC2190 format with chained
RTP muxers.

CC: libav-stable@libav.org
Signed-off-by: Martin Storsjö <martin@martin.st>
2013-01-24 11:31:36 +02:00
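
An illustrative (untested here) invocation of the H.263/RFC 2190 case mentioned above, with the RTSP muxer chaining one RTP muxer per stream:

avconv -i input.avi -c:v h263 -s 352x288 -rtpflags rfc2190 -f rtsp rtsp://example.com/stream

(the same -rtpflags rfc2190 option applies with the ffmpeg tool).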

/*
 * RTP muxer chaining code
 * Copyright (c) 2010 Martin Storsjo
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "avformat.h"
#include "avio_internal.h"
#include "rtpenc_chain.h"
#include "avio_internal.h"
#include "rtp.h"
#include "libavutil/opt.h"
int ff_rtp_chain_mux_open(AVFormatContext **out, AVFormatContext *s,
                          AVStream *st, URLContext *handle, int packet_size,
                          int idx)
{
    AVFormatContext *rtpctx = NULL;
    int ret;
    AVOutputFormat *rtp_format = av_guess_format("rtp", NULL, NULL);
    uint8_t *rtpflags;
    AVDictionary *opts = NULL;

    if (!rtp_format) {
        ret = AVERROR(ENOSYS);
        goto fail;
    }

    /* Allocate an AVFormatContext for each output stream */
    rtpctx = avformat_alloc_context();
    if (!rtpctx) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    rtpctx->oformat = rtp_format;
    if (!avformat_new_stream(rtpctx, NULL)) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    /* Pass the interrupt callback on */
    rtpctx->interrupt_callback = s->interrupt_callback;
    /* Copy the max delay setting; the rtp muxer reads this. */
    rtpctx->max_delay = s->max_delay;
    /* Copy other stream parameters. */
    rtpctx->streams[0]->sample_aspect_ratio = st->sample_aspect_ratio;

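    /* The parent context s (not the freshly allocated rtpctx) is passed to
     * ff_rtp_get_payload_type() below, so that payload_type/rtpflags options
     * set on the chaining muxer are taken into account. */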
    /* Get the payload type from the codec */
    if (st->id < RTP_PT_PRIVATE)
        rtpctx->streams[0]->id =
            ff_rtp_get_payload_type(s, st->codec, idx);
    else
        rtpctx->streams[0]->id = st->id;

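    /* Forward any "rtpflags" value from the chaining muxer to the newly
     * created RTP muxer via its options dictionary. */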
    if (av_opt_get(s, "rtpflags", AV_OPT_SEARCH_CHILDREN, &rtpflags) >= 0)
        av_dict_set(&opts, "rtpflags", rtpflags, AV_DICT_DONT_STRDUP_VAL);

    /* Set the synchronized start time. */
    rtpctx->start_time_realtime = s->start_time_realtime;

    avcodec_copy_context(rtpctx->streams[0]->codec, st->codec);

    if (handle) {
        ffio_fdopen(&rtpctx->pb, handle);
    } else
        ffio_open_dyn_packet_buf(&rtpctx->pb, packet_size);
    ret = avformat_write_header(rtpctx, &opts);
    av_dict_free(&opts);

    if (ret) {
        if (handle) {
            avio_close(rtpctx->pb);
        } else {
            uint8_t *ptr;
            avio_close_dyn_buf(rtpctx->pb, &ptr);
            av_free(ptr);
        }
        avformat_free_context(rtpctx);
        return ret;
    }

    *out = rtpctx;
    return 0;

fail:
    av_free(rtpctx);
    if (handle)
        ffurl_close(handle);
    return ret;
}