
lavc/vaapi_encode_h265: Add GPB frame support for hevc_vaapi

Use GPB frames to replace regular P/B frames if the backend driver does
not support them.

- GPB:
    Generalized P and B picture: a regular P/B frame is replaced by a B
    frame that uses previous prediction only, with reference list
    L0 == L1. Normal B frames still carry two different reference lists
    and allow bi-prediction (a standalone sketch of this follows the
    commit metadata below).

Signed-off-by: Linjie Fu <linjie.fu@intel.com>
Signed-off-by: Fei Wang <fei.w.wang@intel.com>
Author:       Linjie Fu
Date:         2022-03-17 14:41:49 +08:00
Committed by: Haihao Xiang
Parent:       6e45acd23b
Commit:       a285968a0b

3 changed files with 78 additions and 5 deletions
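As a rough, self-contained illustration of the GPB idea described in the
commit message (not part of the patch; the toy types and names below are
invented for this sketch), a P slice is re-labelled as a B slice whose list
L1 is a plain copy of list L0, so both lists still refer only to past
pictures:

/* Toy model of a slice with two reference lists. */
typedef struct ToySlice {
    char type;      /* 'P' or 'B' */
    int  l0[2];     /* reference picture indices, list 0 */
    int  l1[2];     /* reference picture indices, list 1 */
    int  n_l0, n_l1;
} ToySlice;

/* Turn a regular P slice into a GPB slice: keep previous-only
 * prediction, but signal it as a B slice with L1 == L0. */
static void to_gpb(ToySlice *s)
{
    if (s->type != 'P')
        return;
    s->type = 'B';
    for (int i = 0; i < s->n_l0; i++)
        s->l1[i] = s->l0[i];
    s->n_l1 = s->n_l0;
}

A genuine B slice would instead keep a future picture in L1 and allow
bi-prediction; GPB only changes how a previous-predicted slice is signalled.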

libavcodec/vaapi_encode.c

@@ -1875,6 +1875,7 @@ static av_cold int vaapi_encode_init_gop_structure(AVCodecContext *avctx)
     VAStatus vas;
     VAConfigAttrib attr = { VAConfigAttribEncMaxRefFrames };
     uint32_t ref_l0, ref_l1;
+    int prediction_pre_only;
 
     vas = vaGetConfigAttributes(ctx->hwctx->display,
                                 ctx->va_profile,
@@ -1893,6 +1894,51 @@ static av_cold int vaapi_encode_init_gop_structure(AVCodecContext *avctx)
         ref_l1 = attr.value >> 16 & 0xffff;
     }
 
+    ctx->p_to_gpb = 0;
+    prediction_pre_only = 0;
+#if VA_CHECK_VERSION(1, 9, 0)
+    if (!(ctx->codec->flags & FLAG_INTRA_ONLY ||
+        avctx->gop_size <= 1)) {
+        attr = (VAConfigAttrib) { VAConfigAttribPredictionDirection };
+        vas = vaGetConfigAttributes(ctx->hwctx->display,
+                                    ctx->va_profile,
+                                    ctx->va_entrypoint,
+                                    &attr, 1);
+        if (vas != VA_STATUS_SUCCESS) {
+            av_log(avctx, AV_LOG_WARNING, "Failed to query prediction direction "
+                   "attribute: %d (%s).\n", vas, vaErrorStr(vas));
+            return AVERROR_EXTERNAL;
+        } else if (attr.value == VA_ATTRIB_NOT_SUPPORTED) {
+            av_log(avctx, AV_LOG_VERBOSE, "Driver does not report any additional "
+                   "prediction constraints.\n");
+        } else {
+            if (((ref_l0 > 0 || ref_l1 > 0) && !(attr.value & VA_PREDICTION_DIRECTION_PREVIOUS)) ||
+                ((ref_l1 == 0) && (attr.value & (VA_PREDICTION_DIRECTION_FUTURE | VA_PREDICTION_DIRECTION_BI_NOT_EMPTY)))) {
+                av_log(avctx, AV_LOG_ERROR, "Driver reports an incorrect prediction "
+                       "direction attribute.\n");
+                return AVERROR_EXTERNAL;
+            }
+
+            if (!(attr.value & VA_PREDICTION_DIRECTION_FUTURE)) {
+                if (ref_l0 > 0 && ref_l1 > 0) {
+                    prediction_pre_only = 1;
+                    av_log(avctx, AV_LOG_VERBOSE, "Driver only supports identical "
+                           "reference lists for B-frames.\n");
+                }
+            }
+
+            if (attr.value & VA_PREDICTION_DIRECTION_BI_NOT_EMPTY) {
+                if (ref_l0 > 0 && ref_l1 > 0) {
+                    ctx->p_to_gpb = 1;
+                    av_log(avctx, AV_LOG_VERBOSE, "Driver does not support P-frames, "
+                           "replacing them with B-frames.\n");
+                }
+            }
+        }
+    }
+#endif
+
     if (ctx->codec->flags & FLAG_INTRA_ONLY ||
         avctx->gop_size <= 1) {
         av_log(avctx, AV_LOG_VERBOSE, "Using intra frames only.\n");
@@ -1902,15 +1948,26 @@ static av_cold int vaapi_encode_init_gop_structure(AVCodecContext *avctx)
                "reference frames.\n");
         return AVERROR(EINVAL);
     } else if (!(ctx->codec->flags & FLAG_B_PICTURES) ||
-               ref_l1 < 1 || avctx->max_b_frames < 1) {
-        av_log(avctx, AV_LOG_VERBOSE, "Using intra and P-frames "
-               "(supported references: %d / %d).\n", ref_l0, ref_l1);
+               ref_l1 < 1 || avctx->max_b_frames < 1 ||
+               prediction_pre_only) {
+        if (ctx->p_to_gpb)
+            av_log(avctx, AV_LOG_VERBOSE, "Using intra and B-frames "
+                   "(supported references: %d / %d).\n",
+                   ref_l0, ref_l1);
+        else
+            av_log(avctx, AV_LOG_VERBOSE, "Using intra and P-frames "
+                   "(supported references: %d / %d).\n", ref_l0, ref_l1);
         ctx->gop_size = avctx->gop_size;
         ctx->p_per_i  = INT_MAX;
         ctx->b_per_p  = 0;
     } else {
-        av_log(avctx, AV_LOG_VERBOSE, "Using intra, P- and B-frames "
-               "(supported references: %d / %d).\n", ref_l0, ref_l1);
+        if (ctx->p_to_gpb)
+            av_log(avctx, AV_LOG_VERBOSE, "Using intra and B-frames "
+                   "(supported references: %d / %d).\n",
+                   ref_l0, ref_l1);
+        else
+            av_log(avctx, AV_LOG_VERBOSE, "Using intra, P- and B-frames "
+                   "(supported references: %d / %d).\n", ref_l0, ref_l1);
         ctx->gop_size = avctx->gop_size;
         ctx->p_per_i  = INT_MAX;
         ctx->b_per_p  = avctx->max_b_frames;
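
The vaapi_encode.c hunks above reduce to a small decision table over the
driver's VAConfigAttribPredictionDirection value. Below is a standalone
sketch of that logic, using the real libva flag names but with a
hypothetical function name and return convention; it is not the FFmpeg code
path itself:

#include <va/va.h>  /* VA_PREDICTION_DIRECTION_* flags, VA-API >= 1.9 */

/* Decide GOP constraints from the prediction-direction attribute and the
 * supported reference list sizes.  Returns 0 on success, or -1 if the
 * driver reports an inconsistent combination of capabilities. */
static int check_prediction_direction(unsigned attr_value,
                                      unsigned ref_l0, unsigned ref_l1,
                                      int *pre_only, int *p_to_gpb)
{
    *pre_only = 0;
    *p_to_gpb = 0;

    /* References exist but no previous prediction, or no L1 support while
     * future/bi prediction is claimed: inconsistent capabilities. */
    if (((ref_l0 > 0 || ref_l1 > 0) &&
         !(attr_value & VA_PREDICTION_DIRECTION_PREVIOUS)) ||
        (ref_l1 == 0 &&
         (attr_value & (VA_PREDICTION_DIRECTION_FUTURE |
                        VA_PREDICTION_DIRECTION_BI_NOT_EMPTY))))
        return -1;

    /* No future prediction: B-frames are limited to identical L0/L1
     * lists, i.e. previous-only (GPB-style) B-frames. */
    if (!(attr_value & VA_PREDICTION_DIRECTION_FUTURE) &&
        ref_l0 > 0 && ref_l1 > 0)
        *pre_only = 1;

    /* Both lists must be non-empty: plain P-frames cannot be used, so
     * they are replaced by GPB B-frames. */
    if ((attr_value & VA_PREDICTION_DIRECTION_BI_NOT_EMPTY) &&
        ref_l0 > 0 && ref_l1 > 0)
        *p_to_gpb = 1;

    return 0;
}

In the patch, prediction_pre_only forces the P-frame-only GOP path, and
p_to_gpb additionally converts those P slices into B slices with L0 == L1,
which is what the verbose log messages above report.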

libavcodec/vaapi_encode.h

@@ -331,6 +331,7 @@ typedef struct VAAPIEncodeContext {
     int idr_counter;
     int gop_counter;
    int end_of_stream;
+    int p_to_gpb;
 
     // Whether the driver supports ROI at all.
    int roi_allowed;

libavcodec/vaapi_encode_h265.c

@@ -886,6 +886,7 @@ static int vaapi_encode_h265_init_slice_params(AVCodecContext *avctx,
                                                VAAPIEncodePicture *pic,
                                                VAAPIEncodeSlice *slice)
 {
+    VAAPIEncodeContext      *ctx = avctx->priv_data;
     VAAPIEncodeH265Context *priv = avctx->priv_data;
     VAAPIEncodeH265Picture *hpic = pic->priv_data;
     const H265RawSPS        *sps = &priv->raw_sps;
@@ -908,6 +909,9 @@ static int vaapi_encode_h265_init_slice_params(AVCodecContext *avctx,
     sh->slice_type = hpic->slice_type;
+    if (sh->slice_type == HEVC_SLICE_P && ctx->p_to_gpb)
+        sh->slice_type = HEVC_SLICE_B;
 
     sh->slice_pic_order_cnt_lsb = hpic->pic_order_cnt &
         (1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4)) - 1;
@@ -1066,6 +1070,9 @@ static int vaapi_encode_h265_init_slice_params(AVCodecContext *avctx,
         av_assert0(pic->type == PICTURE_TYPE_P ||
                    pic->type == PICTURE_TYPE_B);
         vslice->ref_pic_list0[0] = vpic->reference_frames[0];
+        if (ctx->p_to_gpb && pic->type == PICTURE_TYPE_P)
+            // Reference for GPB B-frame, L0 == L1
+            vslice->ref_pic_list1[0] = vpic->reference_frames[0];
     }
     if (pic->nb_refs >= 2) {
         // Forward reference for B-frame.
@@ -1073,6 +1080,14 @@ static int vaapi_encode_h265_init_slice_params(AVCodecContext *avctx,
         vslice->ref_pic_list1[0] = vpic->reference_frames[1];
     }
 
+    if (pic->type == PICTURE_TYPE_P && ctx->p_to_gpb) {
+        vslice->slice_type = HEVC_SLICE_B;
+        for (i = 0; i < FF_ARRAY_ELEMS(vslice->ref_pic_list0); i++) {
+            vslice->ref_pic_list1[i].picture_id = vslice->ref_pic_list0[i].picture_id;
+            vslice->ref_pic_list1[i].flags      = vslice->ref_pic_list0[i].flags;
+        }
+    }
+
     return 0;
 }