From c8cf461c19e8e35df4b7364d9b90aa42f1ab4560 Mon Sep 17 00:00:00 2001
From: Tim Walker
Date: Sat, 12 Apr 2014 22:11:52 +0200
Subject: [PATCH] dcadec: Do not decode the XCh extension when downmixing to
 stereo

This is neither necessary nor currently supported.

Signed-off-by: Luca Barbato
---
 libavcodec/dcadec.c | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/libavcodec/dcadec.c b/libavcodec/dcadec.c
index 92edce7e42..49d7aac532 100644
--- a/libavcodec/dcadec.c
+++ b/libavcodec/dcadec.c
@@ -1850,6 +1850,16 @@ static int dca_decode_frame(AVCodecContext *avctx, void *data,
     if (s->amode < 16) {
         avctx->channel_layout = dca_core_channel_layout[s->amode];

+        if (s->prim_channels + !!s->lfe > 2 &&
+            avctx->request_channel_layout == AV_CH_LAYOUT_STEREO) {
+            /*
+             * Neither the core's auxiliary data nor our default tables contain
+             * downmix coefficients for the additional channel coded in the XCh
+             * extension, so when we're doing a Stereo downmix, don't decode it.
+             */
+            s->xch_disable = 1;
+        }
+
 #if FF_API_REQUEST_CHANNELS
 FF_DISABLE_DEPRECATION_WARNINGS
         if (s->xch_present && !s->xch_disable &&
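
For context, a minimal sketch (not part of the patch) of how a caller would request the stereo downmix that triggers the branch added above, via the request_channel_layout field of the Libav/FFmpeg decoding API of that era. The helper name open_dca_stereo is invented for illustration, and error handling is kept minimal:

#include <libavcodec/avcodec.h>
#include <libavutil/channel_layout.h>

/* Hypothetical helper: open a DTS decoder configured for a stereo downmix. */
static AVCodecContext *open_dca_stereo(void)
{
    const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_DTS);
    AVCodecContext *avctx;

    if (!codec || !(avctx = avcodec_alloc_context3(codec)))
        return NULL;

    /* Ask for a stereo downmix; with the patch applied, the decoder then
     * skips the XCh extension, since no downmix coefficients exist for
     * its additional channel. */
    avctx->request_channel_layout = AV_CH_LAYOUT_STEREO;

    if (avcodec_open2(avctx, codec, NULL) < 0) {
        avcodec_free_context(&avctx);
        return NULL;
    }
    return avctx;
}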