mirror of https://github.com/FFmpeg/FFmpeg.git

dnxhdenc: use the AVFrame API properly.

Anton Khirnov 2013-11-09 10:14:46 +01:00
parent d2287740d9
commit 57e7b3a89f
2 changed files with 14 additions and 15 deletions
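
For context: the change replaces the AVFrame embedded by value in DNXHDEncContext with a heap-allocated frame owned by avctx->coded_frame, created in init and released in close. Below is a minimal sketch of that lifecycle, using the libavcodec/libavutil API of the time (avctx->coded_frame has since been deprecated and later removed upstream); the example_* function names are illustrative, not part of the commit.

#include <libavcodec/avcodec.h>
#include <libavutil/frame.h>

/* Allocate the codec-owned output frame once, at init time,
 * instead of pointing coded_frame at a frame embedded in the
 * private context. */
static int example_encode_init(AVCodecContext *avctx)
{
    avctx->coded_frame = av_frame_alloc();
    if (!avctx->coded_frame)
        return AVERROR(ENOMEM);

    /* intra-only codec: every coded picture is an I-frame keyframe */
    avctx->coded_frame->key_frame = 1;
    avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
    return 0;
}

/* Release it in the matching close callback; av_frame_free()
 * also resets the pointer to NULL and tolerates a NULL frame. */
static int example_encode_end(AVCodecContext *avctx)
{
    av_frame_free(&avctx->coded_frame);
    return 0;
}

Per-frame properties (interlaced_frame, quality) are then written to avctx->coded_frame during encoding instead of to a context-owned frame, which is what the hunks below do.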

libavcodec/dnxhdenc.c

@@ -307,9 +307,12 @@ static av_cold int dnxhd_encode_init(AVCodecContext *avctx)
     FF_ALLOCZ_OR_GOTO(ctx->m.avctx, ctx->mb_bits, ctx->m.mb_num *sizeof(uint16_t), fail);
     FF_ALLOCZ_OR_GOTO(ctx->m.avctx, ctx->mb_qscale, ctx->m.mb_num *sizeof(uint8_t), fail);
 
-    ctx->frame.key_frame = 1;
-    ctx->frame.pict_type = AV_PICTURE_TYPE_I;
-    ctx->m.avctx->coded_frame = &ctx->frame;
+    avctx->coded_frame = av_frame_alloc();
+    if (!avctx->coded_frame)
+        return AVERROR(ENOMEM);
+
+    avctx->coded_frame->key_frame = 1;
+    avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
 
     if (avctx->thread_count > MAX_THREADS) {
         av_log(avctx, AV_LOG_ERROR, "too many threads\n");
@@ -908,19 +911,14 @@ static void dnxhd_load_picture(DNXHDEncContext *ctx, const AVFrame *frame)
 {
     int i;
 
-    for (i = 0; i < 3; i++) {
-        ctx->frame.data[i]     = frame->data[i];
-        ctx->frame.linesize[i] = frame->linesize[i];
-    }
-
     for (i = 0; i < ctx->m.avctx->thread_count; i++) {
-        ctx->thread[i]->m.linesize   = ctx->frame.linesize[0]<<ctx->interlaced;
-        ctx->thread[i]->m.uvlinesize = ctx->frame.linesize[1]<<ctx->interlaced;
+        ctx->thread[i]->m.linesize   = frame->linesize[0] << ctx->interlaced;
+        ctx->thread[i]->m.uvlinesize = frame->linesize[1] << ctx->interlaced;
         ctx->thread[i]->dct_y_offset  = ctx->m.linesize *8;
         ctx->thread[i]->dct_uv_offset = ctx->m.uvlinesize*8;
     }
 
-    ctx->frame.interlaced_frame = frame->interlaced_frame;
+    ctx->m.avctx->coded_frame->interlaced_frame = frame->interlaced_frame;
     ctx->cur_field = frame->interlaced_frame && !frame->top_field_first;
 }
@@ -942,9 +940,9 @@ static int dnxhd_encode_picture(AVCodecContext *avctx, AVPacket *pkt,
 
 encode_coding_unit:
     for (i = 0; i < 3; i++) {
-        ctx->src[i] = ctx->frame.data[i];
+        ctx->src[i] = frame->data[i];
         if (ctx->interlaced && ctx->cur_field)
-            ctx->src[i] += ctx->frame.linesize[i];
+            ctx->src[i] += frame->linesize[i];
     }
 
     dnxhd_write_header(avctx, buf);
@@ -982,7 +980,7 @@ static int dnxhd_encode_picture(AVCodecContext *avctx, AVPacket *pkt,
         goto encode_coding_unit;
     }
 
-    ctx->frame.quality = ctx->qscale*FF_QP2LAMBDA;
+    avctx->coded_frame->quality = ctx->qscale * FF_QP2LAMBDA;
 
     pkt->flags |= AV_PKT_FLAG_KEY;
     *got_packet = 1;
@@ -1015,6 +1013,8 @@ static av_cold int dnxhd_encode_end(AVCodecContext *avctx)
     for (i = 1; i < avctx->thread_count; i++)
         av_freep(&ctx->thread[i]);
 
+    av_frame_free(&avctx->coded_frame);
+
     return 0;
 }

libavcodec/dnxhdenc.h

@@ -43,7 +43,6 @@ typedef struct DNXHDEncContext {
     AVClass *class;
     MpegEncContext m; ///< Used for quantization dsp functions
 
-    AVFrame frame;
     int cid;
     const CIDEntry *cid_table;
     uint8_t *msip; ///< Macroblock Scan Indexes Payload