Mirror of https://github.com/FFmpeg/FFmpeg.git (synced 2024-12-23 12:43:46 +02:00)

avformat/dashenc: segmentation at the configured segment duration rate

When use_template is enabled and use_timeline is disabled, typically
it is required to generate the segments at the configured segment duration
rate on an average. This commit is particularly needed to handle the
segmentation when video frame rates are fractional like 29.97 or 59.94 fps.
This commit is contained in:
Vishwanath Dixit 2018-04-11 12:30:18 +05:30 committed by Karthick Jeyapal
parent 01ba52852d
commit ab789e184c
2 changed files with 15 additions and 3 deletions

View File

@ -228,7 +228,10 @@ ffmpeg -re -i <input> -map 0 -map 0 -c:a libfdk_aac -c:v libx264
@item -min_seg_duration @var{microseconds}
This is a deprecated option to set the segment length in microseconds, use @var{seg_duration} instead.
@item -seg_duration @var{duration}
Set the segment length in seconds (fractional value can be set). The value is
treated as average segment duration when @var{use_template} is enabled and
@var{use_timeline} is disabled and as minimum segment duration for all the other
use cases.
@item -window_size @var{size}
Set the maximum number of segments kept in the manifest.
@item -extra_window_size @var{size}

View File

@ -1267,6 +1267,7 @@ static int dash_write_packet(AVFormatContext *s, AVPacket *pkt)
DASHContext *c = s->priv_data; DASHContext *c = s->priv_data;
AVStream *st = s->streams[pkt->stream_index]; AVStream *st = s->streams[pkt->stream_index];
OutputStream *os = &c->streams[pkt->stream_index]; OutputStream *os = &c->streams[pkt->stream_index];
int64_t seg_end_duration, elapsed_duration;
int ret; int ret;
ret = update_stream_extradata(s, os, st->codecpar); ret = update_stream_extradata(s, os, st->codecpar);
@ -1294,10 +1295,18 @@ static int dash_write_packet(AVFormatContext *s, AVPacket *pkt)
if (os->first_pts == AV_NOPTS_VALUE) if (os->first_pts == AV_NOPTS_VALUE)
os->first_pts = pkt->pts; os->first_pts = pkt->pts;
if (c->use_template && !c->use_timeline) {
elapsed_duration = pkt->pts - os->first_pts;
seg_end_duration = (int64_t) os->segment_index * c->seg_duration;
} else {
elapsed_duration = pkt->pts - os->start_pts;
seg_end_duration = c->seg_duration;
}
if ((!c->has_video || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) && if ((!c->has_video || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) &&
pkt->flags & AV_PKT_FLAG_KEY && os->packets_written && pkt->flags & AV_PKT_FLAG_KEY && os->packets_written &&
av_compare_ts(pkt->pts - os->start_pts, st->time_base, av_compare_ts(elapsed_duration, st->time_base,
c->seg_duration, AV_TIME_BASE_Q) >= 0) { seg_end_duration, AV_TIME_BASE_Q) >= 0) {
int64_t prev_duration = c->last_duration; int64_t prev_duration = c->last_duration;
c->last_duration = av_rescale_q(pkt->pts - os->start_pts, c->last_duration = av_rescale_q(pkt->pts - os->start_pts,