lavc/rawdec: Use 16-byte line alignment for B1W0 and B0W1 video in nut
Signed-off-by: Michael Niedermayer <michael@niedermayer.cc>
parent 16af350ac5
commit 9556446623
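For context: B1W0 and B0W1 are nut's codec tags for 1 bpp monochrome video, stored as tightly packed rows of width / 8 bytes (rounded up), while the decoder below pads each destination line to a 16-byte boundary. A minimal standalone sketch of that arithmetic, assuming an illustrative align16() helper in place of FFmpeg's FFALIGN macro:

#include <stdio.h>

/* Illustrative stand-in for FFmpeg's FFALIGN(x, 16). */
static int align16(int x)
{
    return (x + 15) & ~15;
}

int main(void)
{
    int width     = 100;                                /* example width in pixels    */
    int row_bytes = width / 8 + (width % 8 ? 1 : 0);    /* packed 1 bpp row: 13 bytes */
    int dst_line  = align16(row_bytes);                 /* padded output line: 16     */

    printf("packed row: %d bytes, 16-byte aligned line: %d bytes\n",
           row_bytes, dst_line);
    return 0;
}

For a 100-pixel-wide frame this gives 13 packed source bytes per row and a 16-byte destination line.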
libavcodec/rawdec.c
@@ -41,7 +41,11 @@ typedef struct RawVideoContext {
     AVBufferRef *palette;
     int frame_size; /* size of the frame in bytes */
     int flip;
-    int is_1_2_4_8_bpp; // 1, 2, 4 and 8 bpp in avi/mov
+    int is_1_2_4_8_bpp; // 1, 2, 4 and 8 bpp in avi/mov, 1 and 8 bpp in nut
+    int is_mono;
+    int is_pal8;
+    int is_nut_mono;
+    int is_nut_pal8;
     int is_yuv2;
     int is_lt_16bpp; // 16bpp pixfmt and bits_per_coded_sample < 16
     int tff;
@@ -96,7 +100,7 @@ static av_cold int raw_init_decoder(AVCodecContext *avctx)
             avpriv_set_systematic_pal2((uint32_t*)context->palette->data, avctx->pix_fmt);
         else {
             memset(context->palette->data, 0, AVPALETTE_SIZE);
-            if (avctx->bits_per_coded_sample == 1)
+            if (avctx->bits_per_coded_sample <= 1)
                 memset(context->palette->data, 0xff, 4);
         }
     }
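In the palette setup above, the whole palette is cleared and, when bits_per_coded_sample is at most 1, only entry 0 is set to 0xFFFFFFFF, so monochrome data carried as PAL8 gets a default white entry 0 while the remaining entries stay zero. A small standalone illustration of the effect of those two memset calls (PAL_SIZE mirrors FFmpeg's AVPALETTE_SIZE of 1024 bytes; this is not the decoder code itself):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define PAL_SIZE 1024  /* 256 palette entries, 4 bytes each */

int main(void)
{
    uint8_t palette[PAL_SIZE];
    uint32_t e0, e1;
    int bits_per_coded_sample = 1;           /* example value              */

    memset(palette, 0, PAL_SIZE);            /* all entries 0x00000000     */
    if (bits_per_coded_sample <= 1)
        memset(palette, 0xff, 4);            /* entry 0 becomes 0xFFFFFFFF */

    memcpy(&e0, palette, 4);
    memcpy(&e1, palette + 4, 4);
    printf("entry 0: 0x%08lX, entry 1: 0x%08lX\n",
           (unsigned long)e0, (unsigned long)e1);
    return 0;
}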
@@ -108,11 +112,24 @@ static av_cold int raw_init_decoder(AVCodecContext *avctx)
         avctx->codec_tag == MKTAG('W','R','A','W'))
         context->flip = 1;

+    if (avctx->pix_fmt == AV_PIX_FMT_MONOWHITE ||
+        avctx->pix_fmt == AV_PIX_FMT_MONOBLACK)
+        context->is_mono = 1;
+    else if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
+        context->is_pal8 = 1;
+
+    if (avctx->codec_tag == MKTAG('B','1','W','0') ||
+        avctx->codec_tag == MKTAG('B','0','W','1'))
+        context->is_nut_mono = 1;
+    else if (avctx->codec_tag == MKTAG('P','A','L','8'))
+        context->is_nut_pal8 = 1;
+
     if (avctx->codec_tag == AV_RL32("yuv2") &&
         avctx->pix_fmt == AV_PIX_FMT_YUYV422)
         context->is_yuv2 = 1;

-    if (avctx->pix_fmt == AV_PIX_FMT_PAL8 && avctx->bits_per_coded_sample == 1)
+    /* Temporary solution until PAL8 is implemented in nut */
+    if (context->is_pal8 && avctx->bits_per_coded_sample == 1)
         avctx->pix_fmt = AV_PIX_FMT_NONE;

     return 0;
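The tag checks above compare avctx->codec_tag against four-character codes built with MKTAG, which packs the characters into one 32-bit little-endian value. A small sketch of such a comparison; the MKTAG definition below matches FFmpeg's macro:

#include <stdint.h>
#include <stdio.h>

/* Same packing as FFmpeg's MKTAG. */
#define MKTAG(a, b, c, d) ((a) | ((b) << 8) | ((c) << 16) | ((unsigned)(d) << 24))

int main(void)
{
    uint32_t tag = MKTAG('B', '1', 'W', '0');   /* value as stored in codec_tag */

    if (tag == MKTAG('B', '1', 'W', '0') || tag == MKTAG('B', '0', 'W', '1'))
        printf("nut monochrome tag: 0x%08lX\n", (unsigned long)tag);
    return 0;
}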
@@ -160,22 +177,34 @@ static int raw_decode(AVCodecContext *avctx, void *data, int *got_frame,
     const uint8_t *buf = avpkt->data;
     int buf_size = avpkt->size;
     int linesize_align = 4;
-    int avpkt_stride;
+    int stride;
     int res, len;
     int need_copy;

     AVFrame *frame = data;

     if (avctx->width <= 0) {
         av_log(avctx, AV_LOG_ERROR, "width is not set\n");
         return AVERROR_INVALIDDATA;
     }
     if (avctx->height <= 0) {
         av_log(avctx, AV_LOG_ERROR, "height is not set\n");
         return AVERROR_INVALIDDATA;
     }
-    avpkt_stride = avpkt->size / avctx->height;
-
-    if (avpkt_stride == 0) {
-        av_log(avctx, AV_LOG_ERROR, "Packet too small (%d) height (%d)\n", avpkt->size, avctx->height);
+    if (context->is_nut_mono)
+        stride = avctx->width / 8 + (avctx->width & 7 ? 1 : 0);
+    else if (context->is_nut_pal8)
+        stride = avctx->width;
+    else
+        stride = avpkt->size / avctx->height;
+
+    if (stride == 0 || avpkt->size < stride * avctx->height) {
+        av_log(avctx, AV_LOG_ERROR, "Packet too small (%d)\n", avpkt->size);
         return AVERROR_INVALIDDATA;
     }

     /* Temporary solution until PAL8 is implemented in nut */
     if (avctx->pix_fmt == AV_PIX_FMT_NONE &&
         avctx->bits_per_coded_sample == 1 &&
         avctx->frame_number == 0 &&
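With this hunk, the expected source stride is derived from the format rather than from the packet size alone: the packed row size for nut mono, one byte per pixel for nut PAL8, and avpkt->size / height otherwise, and the packet must then contain at least stride * height bytes. A hedged standalone sketch of that validation, using plain ints in place of the AVCodecContext and AVPacket fields:

#include <stdio.h>

/* Returns the expected source stride, or 0 if the packet is too small,
 * mirroring the logic in the hunk above; is_nut_mono and is_nut_pal8
 * stand in for the context flags. */
static int expected_stride(int width, int height, int pkt_size,
                           int is_nut_mono, int is_nut_pal8)
{
    int stride;

    if (is_nut_mono)
        stride = width / 8 + (width & 7 ? 1 : 0); /* packed 1 bpp row       */
    else if (is_nut_pal8)
        stride = width;                           /* one byte per pixel     */
    else
        stride = pkt_size / height;               /* infer from packet size */

    if (stride == 0 || pkt_size < stride * height)
        return 0;                                 /* packet too small       */
    return stride;
}

int main(void)
{
    /* 100x50 B1W0 frame: 13 bytes per row, 650 bytes minimum. */
    printf("stride: %d\n", expected_stride(100, 50, 650, 1, 0));
    printf("stride: %d (packet too small)\n", expected_stride(100, 50, 640, 1, 0));
    return 0;
}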
@@ -185,18 +214,20 @@ static int raw_decode(AVCodecContext *avctx, void *data, int *got_frame,
         const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);
         if (!pal) {
             avctx->pix_fmt = AV_PIX_FMT_MONOWHITE;
+            context->is_pal8 = 0;
+            context->is_mono = 1;
         } else
             avctx->pix_fmt = AV_PIX_FMT_PAL8;
     }

     desc = av_pix_fmt_desc_get(avctx->pix_fmt);

     if ((avctx->bits_per_coded_sample == 8 || avctx->bits_per_coded_sample == 4
-         || avctx->bits_per_coded_sample == 2 || avctx->bits_per_coded_sample == 1) &&
-        (avctx->pix_fmt == AV_PIX_FMT_PAL8 || avctx->pix_fmt == AV_PIX_FMT_MONOWHITE) &&
-        (!avctx->codec_tag || avctx->codec_tag == MKTAG('r','a','w',' '))) {
+         || avctx->bits_per_coded_sample <= 2) &&
+        (context->is_mono || context->is_pal8) &&
+        (!avctx->codec_tag || avctx->codec_tag == MKTAG('r','a','w',' ') ||
+         context->is_nut_mono || context->is_nut_pal8)) {
         context->is_1_2_4_8_bpp = 1;
-        if (avctx->bits_per_coded_sample == 1 && avctx->pix_fmt == AV_PIX_FMT_MONOWHITE) {
+        if (context->is_mono) {
            int row_bytes = avctx->width / 8 + (avctx->width & 7 ? 1 : 0);
            context->frame_size = av_image_get_buffer_size(avctx->pix_fmt,
                                                           FFALIGN(row_bytes, 16) * 8,
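In the mono branch above, the internal frame is sized as if the picture were FFALIGN(row_bytes, 16) * 8 pixels wide: at 1 bit per pixel that padded width occupies exactly FFALIGN(row_bytes, 16) bytes per line, so every destination row starts on a 16-byte boundary. A worked example of the numbers involved (plain arithmetic, not the av_image_get_buffer_size() call itself):

#include <stdio.h>

int main(void)
{
    int width     = 100;                                /* example 100x50 frame */
    int height    = 50;
    int row_bytes = width / 8 + (width & 7 ? 1 : 0);    /* 13 packed bytes      */
    int aligned   = (row_bytes + 15) & ~15;             /* FFALIGN(13, 16) = 16 */
    int padded_w  = aligned * 8;                        /* 128 pixels at 1 bpp  */

    /* For a 1 bpp format, av_image_get_buffer_size(pix_fmt, padded_w, height, 1)
     * would report aligned * height bytes: 16 * 50 = 800 here. */
    printf("padded width: %d pixels, buffer: %d bytes\n", padded_w, aligned * height);
    return 0;
}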
@@ -240,19 +271,18 @@ static int raw_decode(AVCodecContext *avctx, void *data, int *got_frame,
     if (!frame->buf[0])
         return AVERROR(ENOMEM);

-    // 1, 2, 4 and 8 bpp in avi/mov
+    // 1, 2, 4 and 8 bpp in avi/mov, 1 and 8 bpp in nut
     if (context->is_1_2_4_8_bpp) {
         int i, j, row_pix = 0;
         uint8_t *dst = frame->buf[0]->data;
-        buf_size = context->frame_size -
-                   (avctx->pix_fmt == AV_PIX_FMT_PAL8 ? AVPALETTE_SIZE : 0);
-        if (avctx->bits_per_coded_sample == 8 || avctx->pix_fmt == AV_PIX_FMT_MONOWHITE) {
-            int pix_per_byte = avctx->pix_fmt == AV_PIX_FMT_MONOWHITE ? 8 : 1;
+        buf_size = context->frame_size - (context->is_pal8 ? AVPALETTE_SIZE : 0);
+        if (avctx->bits_per_coded_sample == 8 || context->is_nut_pal8 || context->is_mono) {
+            int pix_per_byte = context->is_mono ? 8 : 1;
             for (i = 0, j = 0; j < buf_size && i<avpkt->size; i++, j++) {
                 dst[j] = buf[i];
                 row_pix += pix_per_byte;
                 if (row_pix >= avctx->width) {
-                    i += avpkt_stride - (i % avpkt_stride) - 1;
+                    i += stride - (i % stride) - 1;
                     j += 16 - (j % 16) - 1;
                     row_pix = 0;
                 }
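The copy loop above walks the packed source with i and the aligned destination with j; whenever a full row of pixels has been written, i is bumped to the start of the next source row (a multiple of stride) and j to the next 16-byte boundary. A simplified standalone version of that repacking for the mono path (8 pixels per byte), with a hypothetical repack_mono() helper:

#include <stdint.h>
#include <string.h>

/* Copy tightly packed 1 bpp rows into rows padded to a 16-byte boundary,
 * mirroring the mono path of the loop above. src holds width/8 (rounded up)
 * bytes per row; dst must hold the 16-byte aligned row size times height. */
static int repack_mono(uint8_t *dst, const uint8_t *src, int width, int height)
{
    int src_stride = width / 8 + (width & 7 ? 1 : 0); /* packed row size      */
    int dst_stride = (src_stride + 15) & ~15;         /* 16-byte aligned rows */
    int i, j, row_pix = 0;

    memset(dst, 0, (size_t)dst_stride * height);      /* make padding defined */
    for (i = 0, j = 0; i < src_stride * height; i++, j++) {
        dst[j] = src[i];
        row_pix += 8;                                 /* 8 pixels per byte    */
        if (row_pix >= width) {                       /* finished one row     */
            i += src_stride - (i % src_stride) - 1;   /* jump to next src row */
            j += 16 - (j % 16) - 1;                   /* jump to next dst row */
            row_pix = 0;
        }
    }
    return dst_stride;
}

int main(void)
{
    uint8_t src[13 * 50] = {0};   /* 100x50 packed mono frame  */
    uint8_t dst[16 * 50];         /* 16-byte aligned rows      */
    repack_mono(dst, src, 100, 50);
    return 0;
}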
@@ -263,7 +293,7 @@ static int raw_decode(AVCodecContext *avctx, void *data, int *got_frame,
                 dst[2 * j + 1] = buf[i] & 15;
                 row_pix += 2;
                 if (row_pix >= avctx->width) {
-                    i += avpkt_stride - (i % avpkt_stride) - 1;
+                    i += stride - (i % stride) - 1;
                     j += 8 - (j % 8) - 1;
                     row_pix = 0;
                 }
@@ -276,7 +306,7 @@ static int raw_decode(AVCodecContext *avctx, void *data, int *got_frame,
                 dst[4 * j + 3] = buf[i] & 3;
                 row_pix += 4;
                 if (row_pix >= avctx->width) {
-                    i += avpkt_stride - (i % avpkt_stride) - 1;
+                    i += stride - (i % stride) - 1;
                     j += 4 - (j % 4) - 1;
                     row_pix = 0;
                 }
@@ -294,7 +324,7 @@ static int raw_decode(AVCodecContext *avctx, void *data, int *got_frame,
                 dst[8 * j + 7] = buf[i] & 1;
                 row_pix += 8;
                 if (row_pix >= avctx->width) {
-                    i += avpkt_stride - (i % avpkt_stride) - 1;
+                    i += stride - (i % stride) - 1;
                     j += 2 - (j % 2) - 1;
                     row_pix = 0;
                 }
@@ -371,6 +401,7 @@ static int raw_decode(AVCodecContext *avctx, void *data, int *got_frame,
          avctx->pix_fmt==AV_PIX_FMT_RGB555BE ||
          avctx->pix_fmt==AV_PIX_FMT_RGB565LE ||
          avctx->pix_fmt==AV_PIX_FMT_MONOWHITE ||
+         avctx->pix_fmt==AV_PIX_FMT_MONOBLACK ||
          avctx->pix_fmt==AV_PIX_FMT_PAL8) &&
         FFALIGN(frame->linesize[0], linesize_align) * avctx->height <= buf_size)
         frame->linesize[0] = FFALIGN(frame->linesize[0], linesize_align);