
fftools/ffmpeg: attach bits_per_raw_sample information to frames

This avoids encoders having to reach into filters or decoders for this information.
Author: Anton Khirnov 2023-05-27 17:38:36 +02:00
parent 7d4e00ccf0
commit 5293adb1a7
5 changed files with 41 additions and 41 deletions
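
For orientation, here is a minimal sketch of the mechanism this commit builds on: per-frame side data carried in AVFrame.opaque_ref. SketchFrameData and sketch_frame_data() below are illustrative stand-ins for the FrameData struct and the frame_data() helper in fftools; only the libavutil calls are real API.

#include <stdint.h>
#include <libavutil/buffer.h>
#include <libavutil/frame.h>
#include <libavutil/rational.h>

/* Illustrative stand-in for fftools' FrameData: per-frame bookkeeping that
 * now also carries the decoder's bits_per_raw_sample. */
typedef struct SketchFrameData {
    int64_t    pts;
    AVRational tb;
    AVRational frame_rate_filter;
    int        bits_per_raw_sample;
} SketchFrameData;

/* Roughly what the frame_data() helper does: return the data attached to
 * the frame, allocating a zeroed buffer on first use; NULL on OOM. */
static SketchFrameData *sketch_frame_data(AVFrame *frame)
{
    if (!frame->opaque_ref) {
        frame->opaque_ref = av_buffer_allocz(sizeof(SketchFrameData));
        if (!frame->opaque_ref)
            return NULL;
    }
    return (SketchFrameData *)frame->opaque_ref->data;
}

The decoder fills this in right after avcodec_receive_frame(); because frame properties (including opaque_ref) are propagated through the filtergraph, enc_open() can read the value back from the frame instead of reaching into the decoder context.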


@@ -315,9 +315,6 @@ typedef struct FilterGraph {
int index;
AVFilterGraph *graph;
// true when the filtergraph contains only meta filters
// that do not modify the frame data
int is_meta;
InputFilter **inputs;
int nb_inputs;
@@ -339,8 +336,6 @@ typedef struct InputStream {
int decoding_needed; /* non zero if the packets must be decoded in 'raw_fifo', see DECODING_FOR_* */
#define DECODING_FOR_OST 1
#define DECODING_FOR_FILTER 2
// should attach FrameData as opaque_ref after decoding
int want_frame_data;
/**
* Codec parameters - to be used by the decoding/streamcopy code.
@@ -653,6 +648,8 @@ typedef struct FrameData {
AVRational tb;
AVRational frame_rate_filter;
int bits_per_raw_sample;
} FrameData;
extern InputFile **input_files;


@@ -472,6 +472,7 @@ int dec_packet(InputStream *ist, const AVPacket *pkt, int no_eof)
while (1) {
AVFrame *frame = d->frame;
FrameData *fd;
update_benchmark(NULL);
ret = avcodec_receive_frame(dec, frame);
@@ -508,19 +509,17 @@ int dec_packet(InputStream *ist, const AVPacket *pkt, int no_eof)
exit_program(1);
}
if (ist->want_frame_data) {
FrameData *fd;
av_assert0(!frame->opaque_ref);
fd = frame_data(frame);
if (!fd) {
av_frame_unref(frame);
report_and_exit(AVERROR(ENOMEM));
}
fd->pts = frame->pts;
fd->tb = dec->pkt_timebase;
fd->idx = dec->frame_num - 1;
av_assert0(!frame->opaque_ref);
fd = frame_data(frame);
if (!fd) {
av_frame_unref(frame);
report_and_exit(AVERROR(ENOMEM));
}
fd->pts = frame->pts;
fd->tb = dec->pkt_timebase;
fd->idx = dec->frame_num - 1;
fd->bits_per_raw_sample = dec->bits_per_raw_sample;
frame->time_base = dec->pkt_timebase;


@@ -198,6 +198,7 @@ int enc_open(OutputStream *ost, AVFrame *frame)
AVCodecContext *dec_ctx = NULL;
const AVCodec *enc = enc_ctx->codec;
OutputFile *of = output_files[ost->file_index];
FrameData *fd = frame ? frame_data(frame) : NULL;
int ret;
if (e->opened)
@@ -219,8 +220,8 @@ int enc_open(OutputStream *ost, AVFrame *frame)
if (ost->bits_per_raw_sample)
enc_ctx->bits_per_raw_sample = ost->bits_per_raw_sample;
else if (dec_ctx && ost->filter->graph->is_meta)
enc_ctx->bits_per_raw_sample = FFMIN(dec_ctx->bits_per_raw_sample,
else if (fd)
enc_ctx->bits_per_raw_sample = FFMIN(fd->bits_per_raw_sample,
av_get_bytes_per_sample(enc_ctx->sample_fmt) << 3);
enc_ctx->time_base = ost->enc_timebase.num > 0 ? ost->enc_timebase :
@@ -230,10 +231,8 @@ int enc_open(OutputStream *ost, AVFrame *frame)
case AVMEDIA_TYPE_VIDEO: {
AVRational fr = ost->frame_rate;
if (!fr.num && frame) {
FrameData *fd = frame_data(frame);
if (!fr.num && fd)
fr = fd->frame_rate_filter;
}
if (!fr.num && !ost->max_frame_rate.num) {
fr = (AVRational){25, 1};
av_log(ost, AV_LOG_WARNING,
@@ -282,8 +281,8 @@ int enc_open(OutputStream *ost, AVFrame *frame)
if (ost->bits_per_raw_sample)
enc_ctx->bits_per_raw_sample = ost->bits_per_raw_sample;
else if (dec_ctx && ost->filter->graph->is_meta)
enc_ctx->bits_per_raw_sample = FFMIN(dec_ctx->bits_per_raw_sample,
else if (fd)
enc_ctx->bits_per_raw_sample = FFMIN(fd->bits_per_raw_sample,
av_pix_fmt_desc_get(enc_ctx->pix_fmt)->comp[0].depth);
if (frame) {
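
The net effect in enc_open(): the bits_per_raw_sample hint now comes from the frame's attached FrameData rather than from dec_ctx, still clamped to what the encoder's output format can represent. A hedged sketch of just that clamp (clamp_bits_video()/clamp_bits_audio() are made-up names, not fftools functions):

#include <libavutil/common.h>     /* FFMIN */
#include <libavutil/pixdesc.h>    /* av_pix_fmt_desc_get() */
#include <libavutil/samplefmt.h>  /* av_get_bytes_per_sample() */

/* Mirror of the enc_open() logic above; pix_fmt/sample_fmt are assumed valid. */
static int clamp_bits_video(int fd_bits, enum AVPixelFormat pix_fmt)
{
    return FFMIN(fd_bits, av_pix_fmt_desc_get(pix_fmt)->comp[0].depth);
}

static int clamp_bits_audio(int fd_bits, enum AVSampleFormat sample_fmt)
{
    return FFMIN(fd_bits, av_get_bytes_per_sample(sample_fmt) << 3);
}

For example, a 10-bit decode (fd_bits = 10) written out as 8-bit AV_PIX_FMT_YUV420P still ends up with bits_per_raw_sample = 8; only the source of the value changes.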


@@ -45,6 +45,9 @@ typedef struct FilterGraphPriv {
char log_name[32];
int is_simple;
// true when the filtergraph contains only meta filters
// that do not modify the frame data
int is_meta;
const char *graph_desc;
@@ -1566,7 +1569,7 @@ static int configure_filtergraph(FilterGraph *fg)
if ((ret = avfilter_graph_config(fg->graph, NULL)) < 0)
goto fail;
fg->is_meta = graph_is_meta(fg->graph);
fgp->is_meta = graph_is_meta(fg->graph);
/* limit the lists of allowed formats to the ones selected, to
* make sure they stay the same if the filtergraph is reconfigured later */
@@ -1714,6 +1717,8 @@ int reap_filters(int flush)
filtered_frame = fgp->frame;
while (1) {
FrameData *fd;
ret = av_buffersink_get_frame_flags(filter, filtered_frame,
AV_BUFFERSINK_FLAG_NO_REQUEST);
if (ret < 0) {
@@ -1744,16 +1749,20 @@ int reap_filters(int flush)
tb.num, tb.den);
}
if (ost->type == AVMEDIA_TYPE_VIDEO) {
FrameData *fd = frame_data(filtered_frame);
if (!fd) {
av_frame_unref(filtered_frame);
report_and_exit(AVERROR(ENOMEM));
}
fd->frame_rate_filter = av_buffersink_get_frame_rate(filter);
fd = frame_data(filtered_frame);
if (!fd) {
av_frame_unref(filtered_frame);
report_and_exit(AVERROR(ENOMEM));
}
// only use bits_per_raw_sample passed through from the decoder
// if the filtergraph did not touch the frame data
if (!fgp->is_meta)
fd->bits_per_raw_sample = 0;
if (ost->type == AVMEDIA_TYPE_VIDEO)
fd->frame_rate_filter = av_buffersink_get_frame_rate(filter);
enc_frame(ost, filtered_frame);
av_frame_unref(filtered_frame);
}
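
The is_meta flag that moved into FilterGraphPriv guards against stale information: the decoder's bits_per_raw_sample is only meaningful if no filter touched the frame data. As a one-line illustration (passthrough_bits() is a made-up name, not part of fftools):

/* Keep the decoder-provided value only for metadata-only filtergraphs;
 * otherwise reset it so enc_open() falls back to the output format's depth. */
static int passthrough_bits(int decoder_bits, int graph_is_meta)
{
    return graph_is_meta ? decoder_bits : 0;
}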


@@ -365,15 +365,11 @@ static int enc_stats_init(OutputStream *ost, EncStats *es, int pre,
c->type = fmt_specs[i].type;
if (fmt_specs[i].need_input_data) {
if (ost->ist)
ost->ist->want_frame_data = 1;
else {
av_log(ost, AV_LOG_WARNING,
"Format directive '%s' is unavailable, because "
"this output stream has no associated input stream\n",
val);
}
if (fmt_specs[i].need_input_data && !ost->ist) {
av_log(ost, AV_LOG_WARNING,
"Format directive '%s' is unavailable, because "
"this output stream has no associated input stream\n",
val);
}
break;