
whitespace cosmetics: K&R coding style, prettyprinting

Originally committed as revision 20381 to svn://svn.ffmpeg.org/ffmpeg/trunk
Diego Biurrun 2009-10-27 16:57:35 +00:00
parent bc8964ef20
commit d3067047e7
3 changed files with 170 additions and 164 deletions
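For context, the conventions named in the commit message are the usual K&R whitespace rules, which is exactly what the diff below applies: a space after control-flow keywords such as for/if/while/switch, spaces around assignments and binary operators, vertical alignment of related assignments, and the opening brace of a function body on its own line. A minimal standalone sketch of that style (illustrative only, not taken from the FFmpeg sources):

    #include <stdio.h>
    #include <string.h>

    /* Count the blanks in a string, formatted in the style the commit adopts:
     * space after 'for'/'if', spaces around '=' and '<', function brace on
     * its own line. */
    static int count_blanks(const char *s)
    {
        int i, n = 0;

        for (i = 0; i < (int)strlen(s); i++) {
            if (s[i] == ' ')
                n++;
        }
        return n;
    }

    int main(void)
    {
        printf("%d\n", count_blanks("a b c")); /* prints 2 */
        return 0;
    }

No functional change is intended anywhere in the diff; only whitespace and brace placement are touched.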

File 1 of 3

@@ -18,6 +18,7 @@
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "avcodec.h"
#include "pnm.h"
@@ -32,10 +33,10 @@ static void pnm_get(PNMContext *sc, char *str, int buf_size)
int c;
/* skip spaces and comments */
for(;;) {
for (;;) {
c = *sc->bytestream++;
if (c == '#') {
do {
do {
c = *sc->bytestream++;
} while (c != '\n' && sc->bytestream < sc->bytestream_end);
} else if (!pnm_space(c)) {
@@ -52,7 +53,8 @@ static void pnm_get(PNMContext *sc, char *str, int buf_size)
*s = '\0';
}
int ff_pnm_decode_header(AVCodecContext *avctx, PNMContext * const s){
int ff_pnm_decode_header(AVCodecContext *avctx, PNMContext * const s)
{
char buf1[32], tuple_type[32];
int h, w, depth, maxval;
@@ -67,12 +69,12 @@ int ff_pnm_decode_header(AVCodecContext *avctx, PNMContext * const s){
} else if (!strcmp(buf1, "P6")) {
avctx->pix_fmt = PIX_FMT_RGB24;
} else if (!strcmp(buf1, "P7")) {
w = -1;
h = -1;
w = -1;
h = -1;
maxval = -1;
depth = -1;
depth = -1;
tuple_type[0] = '\0';
for(;;) {
for (;;) {
pnm_get(s, buf1, sizeof(buf1));
if (!strcmp(buf1, "WIDTH")) {
pnm_get(s, buf1, sizeof(buf1));
@@ -98,7 +100,7 @@ int ff_pnm_decode_header(AVCodecContext *avctx, PNMContext * const s){
if (w <= 0 || h <= 0 || maxval <= 0 || depth <= 0 || tuple_type[0] == '\0' || avcodec_check_dimensions(avctx, w, h))
return -1;
avctx->width = w;
avctx->width = w;
avctx->height = h;
if (depth == 1) {
if (maxval == 1)

File 2 of 3

@@ -23,63 +23,62 @@
#include "pnm.h"
static int pnm_parse(AVCodecParserContext *s,
AVCodecContext *avctx,
const uint8_t **poutbuf, int *poutbuf_size,
const uint8_t *buf, int buf_size)
static int pnm_parse(AVCodecParserContext *s, AVCodecContext *avctx,
const uint8_t **poutbuf, int *poutbuf_size,
const uint8_t *buf, int buf_size)
{
ParseContext *pc = s->priv_data;
PNMContext pnmctx;
int next;
for(; pc->overread>0; pc->overread--){
for (; pc->overread > 0; pc->overread--) {
pc->buffer[pc->index++]= pc->buffer[pc->overread_index++];
}
retry:
if(pc->index){
pnmctx.bytestream_start=
pnmctx.bytestream= pc->buffer;
pnmctx.bytestream_end= pc->buffer + pc->index;
}else{
pnmctx.bytestream_start=
pnmctx.bytestream= (uint8_t *) buf; /* casts avoid warnings */
pnmctx.bytestream_end= (uint8_t *) buf + buf_size;
if (pc->index) {
pnmctx.bytestream_start =
pnmctx.bytestream = pc->buffer;
pnmctx.bytestream_end = pc->buffer + pc->index;
} else {
pnmctx.bytestream_start =
pnmctx.bytestream = (uint8_t *) buf; /* casts avoid warnings */
pnmctx.bytestream_end = (uint8_t *) buf + buf_size;
}
if(ff_pnm_decode_header(avctx, &pnmctx) < 0){
if(pnmctx.bytestream < pnmctx.bytestream_end){
if(pc->index){
pc->index=0;
}else{
if (ff_pnm_decode_header(avctx, &pnmctx) < 0) {
if (pnmctx.bytestream < pnmctx.bytestream_end) {
if (pc->index) {
pc->index = 0;
} else {
buf++;
buf_size--;
}
goto retry;
}
#if 0
if(pc->index && pc->index*2 + FF_INPUT_BUFFER_PADDING_SIZE < pc->buffer_size && buf_size > pc->index){
if (pc->index && pc->index * 2 + FF_INPUT_BUFFER_PADDING_SIZE < pc->buffer_size && buf_size > pc->index) {
memcpy(pc->buffer + pc->index, buf, pc->index);
pc->index += pc->index;
buf += pc->index;
buf_size -= pc->index;
buf += pc->index;
buf_size -= pc->index;
goto retry;
}
#endif
next= END_NOT_FOUND;
}else{
next= pnmctx.bytestream - pnmctx.bytestream_start
+ avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height);
if(pnmctx.bytestream_start!=buf)
next-= pc->index;
if(next > buf_size)
next= END_NOT_FOUND;
next = END_NOT_FOUND;
} else {
next = pnmctx.bytestream - pnmctx.bytestream_start
+ avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height);
if (pnmctx.bytestream_start != buf)
next -= pc->index;
if (next > buf_size)
next = END_NOT_FOUND;
}
if(ff_combine_frame(pc, next, &buf, &buf_size)<0){
*poutbuf = NULL;
if (ff_combine_frame(pc, next, &buf, &buf_size) < 0) {
*poutbuf = NULL;
*poutbuf_size = 0;
return buf_size;
}
*poutbuf = buf;
*poutbuf = buf;
*poutbuf_size = buf_size;
return next;
}

File 3 of 3

@@ -18,51 +18,52 @@
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "avcodec.h"
#include "bytestream.h"
#include "pnm.h"
static av_cold int common_init(AVCodecContext *avctx){
static av_cold int common_init(AVCodecContext *avctx)
{
PNMContext *s = avctx->priv_data;
avcodec_get_frame_defaults((AVFrame*)&s->picture);
avctx->coded_frame= (AVFrame*)&s->picture;
avctx->coded_frame = (AVFrame*)&s->picture;
return 0;
}
static int pnm_decode_frame(AVCodecContext *avctx,
void *data, int *data_size,
AVPacket *avpkt)
static int pnm_decode_frame(AVCodecContext *avctx, void *data,
int *data_size, AVPacket *avpkt)
{
const uint8_t *buf = avpkt->data;
int buf_size = avpkt->size;
const uint8_t *buf = avpkt->data;
int buf_size = avpkt->size;
PNMContext * const s = avctx->priv_data;
AVFrame *picture = data;
AVFrame * const p= (AVFrame*)&s->picture;
AVFrame *picture = data;
AVFrame * const p = (AVFrame*)&s->picture;
int i, n, linesize, h, upgrade = 0;
unsigned char *ptr;
s->bytestream_start=
s->bytestream= buf;
s->bytestream_end= buf + buf_size;
s->bytestream_start =
s->bytestream = buf;
s->bytestream_end = buf + buf_size;
if(ff_pnm_decode_header(avctx, s) < 0)
if (ff_pnm_decode_header(avctx, s) < 0)
return -1;
if(p->data[0])
if (p->data[0])
avctx->release_buffer(avctx, p);
p->reference= 0;
if(avctx->get_buffer(avctx, p) < 0){
p->reference = 0;
if (avctx->get_buffer(avctx, p) < 0) {
av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
return -1;
}
p->pict_type= FF_I_TYPE;
p->key_frame= 1;
p->pict_type = FF_I_TYPE;
p->key_frame = 1;
switch(avctx->pix_fmt) {
switch (avctx->pix_fmt) {
default:
return -1;
case PIX_FMT_RGB48BE:
@@ -86,47 +87,47 @@ static int pnm_decode_frame(AVCodecContext *avctx,
case PIX_FMT_MONOBLACK:
n = (avctx->width + 7) >> 3;
do_read:
ptr = p->data[0];
ptr = p->data[0];
linesize = p->linesize[0];
if(s->bytestream + n*avctx->height > s->bytestream_end)
if (s->bytestream + n * avctx->height > s->bytestream_end)
return -1;
for(i = 0; i < avctx->height; i++) {
for (i = 0; i < avctx->height; i++) {
if (!upgrade)
memcpy(ptr, s->bytestream, n);
else if (upgrade == 1) {
unsigned int j, f = (255*128 + s->maxval/2) / s->maxval;
for (j=0; j<n; j++)
unsigned int j, f = (255 * 128 + s->maxval / 2) / s->maxval;
for (j = 0; j < n; j++)
ptr[j] = (s->bytestream[j] * f + 64) >> 7;
} else if (upgrade == 2) {
unsigned int j, v, f = (65535*32768 + s->maxval/2) / s->maxval;
for (j=0; j<n/2; j++) {
unsigned int j, v, f = (65535 * 32768 + s->maxval / 2) / s->maxval;
for (j = 0; j < n / 2; j++) {
v = be2me_16(((uint16_t *)s->bytestream)[j]);
((uint16_t *)ptr)[j] = (v * f + 16384) >> 15;
}
}
s->bytestream += n;
ptr += linesize;
ptr += linesize;
}
break;
case PIX_FMT_YUV420P:
{
unsigned char *ptr1, *ptr2;
n = avctx->width;
ptr = p->data[0];
n = avctx->width;
ptr = p->data[0];
linesize = p->linesize[0];
if(s->bytestream + n*avctx->height*3/2 > s->bytestream_end)
if (s->bytestream + n * avctx->height * 3 / 2 > s->bytestream_end)
return -1;
for(i = 0; i < avctx->height; i++) {
for (i = 0; i < avctx->height; i++) {
memcpy(ptr, s->bytestream, n);
s->bytestream += n;
ptr += linesize;
ptr += linesize;
}
ptr1 = p->data[1];
ptr2 = p->data[2];
n >>= 1;
h = avctx->height >> 1;
for(i = 0; i < h; i++) {
for (i = 0; i < h; i++) {
memcpy(ptr1, s->bytestream, n);
s->bytestream += n;
memcpy(ptr2, s->bytestream, n);
@@ -137,14 +138,14 @@ static int pnm_decode_frame(AVCodecContext *avctx,
}
break;
case PIX_FMT_RGB32:
ptr = p->data[0];
ptr = p->data[0];
linesize = p->linesize[0];
if(s->bytestream + avctx->width*avctx->height*4 > s->bytestream_end)
if (s->bytestream + avctx->width * avctx->height * 4 > s->bytestream_end)
return -1;
for(i = 0; i < avctx->height; i++) {
for (i = 0; i < avctx->height; i++) {
int j, r, g, b, a;
for(j = 0;j < avctx->width; j++) {
for (j = 0; j < avctx->width; j++) {
r = *s->bytestream++;
g = *s->bytestream++;
b = *s->bytestream++;
@@ -155,66 +156,67 @@ static int pnm_decode_frame(AVCodecContext *avctx,
}
break;
}
*picture= *(AVFrame*)&s->picture;
*picture = *(AVFrame*)&s->picture;
*data_size = sizeof(AVPicture);
return s->bytestream - s->bytestream_start;
}
static int pnm_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int buf_size, void *data){
PNMContext *s = avctx->priv_data;
AVFrame *pict = data;
AVFrame * const p= (AVFrame*)&s->picture;
static int pnm_encode_frame(AVCodecContext *avctx, unsigned char *outbuf,
int buf_size, void *data)
{
PNMContext *s = avctx->priv_data;
AVFrame *pict = data;
AVFrame * const p = (AVFrame*)&s->picture;
int i, h, h1, c, n, linesize;
uint8_t *ptr, *ptr1, *ptr2;
if(buf_size < avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height) + 200){
if (buf_size < avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height) + 200) {
av_log(avctx, AV_LOG_ERROR, "encoded frame too large\n");
return -1;
}
*p = *pict;
p->pict_type= FF_I_TYPE;
p->key_frame= 1;
*p = *pict;
p->pict_type = FF_I_TYPE;
p->key_frame = 1;
s->bytestream_start=
s->bytestream= outbuf;
s->bytestream_end= outbuf+buf_size;
s->bytestream_start =
s->bytestream = outbuf;
s->bytestream_end = outbuf + buf_size;
h = avctx->height;
h = avctx->height;
h1 = h;
switch(avctx->pix_fmt) {
switch (avctx->pix_fmt) {
case PIX_FMT_MONOWHITE:
c = '4';
n = (avctx->width + 7) >> 3;
c = '4';
n = (avctx->width + 7) >> 3;
break;
case PIX_FMT_GRAY8:
c = '5';
n = avctx->width;
c = '5';
n = avctx->width;
break;
case PIX_FMT_GRAY16BE:
c = '5';
n = avctx->width * 2;
c = '5';
n = avctx->width * 2;
break;
case PIX_FMT_RGB24:
c = '6';
n = avctx->width * 3;
c = '6';
n = avctx->width * 3;
break;
case PIX_FMT_RGB48BE:
c = '6';
n = avctx->width * 6;
c = '6';
n = avctx->width * 6;
break;
case PIX_FMT_YUV420P:
c = '5';
n = avctx->width;
c = '5';
n = avctx->width;
h1 = (h * 3) / 2;
break;
default:
return -1;
}
snprintf(s->bytestream, s->bytestream_end - s->bytestream,
"P%c\n%d %d\n",
c, avctx->width, h1);
"P%c\n%d %d\n", c, avctx->width, h1);
s->bytestream += strlen(s->bytestream);
if (avctx->pix_fmt != PIX_FMT_MONOWHITE) {
snprintf(s->bytestream, s->bytestream_end - s->bytestream,
@@ -222,12 +224,12 @@ static int pnm_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int bu
s->bytestream += strlen(s->bytestream);
}
ptr = p->data[0];
ptr = p->data[0];
linesize = p->linesize[0];
for(i=0;i<h;i++) {
for (i = 0; i < h; i++) {
memcpy(s->bytestream, ptr, n);
s->bytestream += n;
ptr += linesize;
ptr += linesize;
}
if (avctx->pix_fmt == PIX_FMT_YUV420P) {
@@ -235,7 +237,7 @@ static int pnm_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int bu
n >>= 1;
ptr1 = p->data[1];
ptr2 = p->data[2];
for(i=0;i<h;i++) {
for (i = 0; i < h; i++) {
memcpy(s->bytestream, ptr1, n);
s->bytestream += n;
memcpy(s->bytestream, ptr2, n);
@@ -247,52 +249,54 @@ static int pnm_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int bu
return s->bytestream - s->bytestream_start;
}
static int pam_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int buf_size, void *data){
PNMContext *s = avctx->priv_data;
AVFrame *pict = data;
AVFrame * const p= (AVFrame*)&s->picture;
static int pam_encode_frame(AVCodecContext *avctx, unsigned char *outbuf,
int buf_size, void *data)
{
PNMContext *s = avctx->priv_data;
AVFrame *pict = data;
AVFrame * const p = (AVFrame*)&s->picture;
int i, h, w, n, linesize, depth, maxval;
const char *tuple_type;
uint8_t *ptr;
if(buf_size < avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height) + 200){
if (buf_size < avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height) + 200) {
av_log(avctx, AV_LOG_ERROR, "encoded frame too large\n");
return -1;
}
*p = *pict;
p->pict_type= FF_I_TYPE;
p->key_frame= 1;
*p = *pict;
p->pict_type = FF_I_TYPE;
p->key_frame = 1;
s->bytestream_start=
s->bytestream= outbuf;
s->bytestream_end= outbuf+buf_size;
s->bytestream_start =
s->bytestream = outbuf;
s->bytestream_end = outbuf+buf_size;
h = avctx->height;
w = avctx->width;
switch(avctx->pix_fmt) {
switch (avctx->pix_fmt) {
case PIX_FMT_MONOWHITE:
n = (w + 7) >> 3;
depth = 1;
maxval = 1;
n = (w + 7) >> 3;
depth = 1;
maxval = 1;
tuple_type = "BLACKANDWHITE";
break;
case PIX_FMT_GRAY8:
n = w;
depth = 1;
maxval = 255;
n = w;
depth = 1;
maxval = 255;
tuple_type = "GRAYSCALE";
break;
case PIX_FMT_RGB24:
n = w * 3;
depth = 3;
maxval = 255;
n = w * 3;
depth = 3;
maxval = 255;
tuple_type = "RGB";
break;
case PIX_FMT_RGB32:
n = w * 4;
depth = 4;
maxval = 255;
n = w * 4;
depth = 4;
maxval = 255;
tuple_type = "RGB_ALPHA";
break;
default:
@@ -303,15 +307,15 @@ static int pam_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int bu
w, h, depth, maxval, tuple_type);
s->bytestream += strlen(s->bytestream);
ptr = p->data[0];
ptr = p->data[0];
linesize = p->linesize[0];
if (avctx->pix_fmt == PIX_FMT_RGB32) {
int j;
unsigned int v;
for(i=0;i<h;i++) {
for(j=0;j<w;j++) {
for (i = 0; i < h; i++) {
for (j = 0; j < w; j++) {
v = ((uint32_t *)ptr)[j];
bytestream_put_be24(&s->bytestream, v);
*s->bytestream++ = v >> 24;
@@ -319,16 +323,17 @@ static int pam_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int bu
ptr += linesize;
}
} else {
for(i=0;i<h;i++) {
for (i = 0; i < h; i++) {
memcpy(s->bytestream, ptr, n);
s->bytestream += n;
ptr += linesize;
ptr += linesize;
}
}
return s->bytestream - s->bytestream_start;
}
static av_cold int common_end(AVCodecContext *avctx){
static av_cold int common_end(AVCodecContext *avctx)
{
PNMContext *s = avctx->priv_data;
if (s->picture.data[0])
@@ -349,8 +354,8 @@ AVCodec pgm_decoder = {
common_end,
pnm_decode_frame,
CODEC_CAP_DR1,
.pix_fmts= (const enum PixelFormat[]){PIX_FMT_GRAY8, PIX_FMT_GRAY16BE, PIX_FMT_NONE},
.long_name= NULL_IF_CONFIG_SMALL("PGM (Portable GrayMap) image"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_GRAY8, PIX_FMT_GRAY16BE, PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("PGM (Portable GrayMap) image"),
};
#endif
@@ -362,10 +367,10 @@ AVCodec pgm_encoder = {
sizeof(PNMContext),
common_init,
pnm_encode_frame,
.pix_fmts= (const enum PixelFormat[]){PIX_FMT_GRAY8, PIX_FMT_GRAY16BE, PIX_FMT_NONE},
.long_name= NULL_IF_CONFIG_SMALL("PGM (Portable GrayMap) image"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_GRAY8, PIX_FMT_GRAY16BE, PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("PGM (Portable GrayMap) image"),
};
#endif // CONFIG_PGM_ENCODER
#endif
#if CONFIG_PGMYUV_DECODER
AVCodec pgmyuv_decoder = {
@@ -378,8 +383,8 @@ AVCodec pgmyuv_decoder = {
common_end,
pnm_decode_frame,
CODEC_CAP_DR1,
.pix_fmts= (const enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_NONE},
.long_name= NULL_IF_CONFIG_SMALL("PGMYUV (Portable GrayMap YUV) image"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("PGMYUV (Portable GrayMap YUV) image"),
};
#endif
@@ -391,10 +396,10 @@ AVCodec pgmyuv_encoder = {
sizeof(PNMContext),
common_init,
pnm_encode_frame,
.pix_fmts= (const enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_NONE},
.long_name= NULL_IF_CONFIG_SMALL("PGMYUV (Portable GrayMap YUV) image"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("PGMYUV (Portable GrayMap YUV) image"),
};
#endif // CONFIG_PGMYUV_ENCODER
#endif
#if CONFIG_PPM_DECODER
AVCodec ppm_decoder = {
@@ -407,8 +412,8 @@ AVCodec ppm_decoder = {
common_end,
pnm_decode_frame,
CODEC_CAP_DR1,
.pix_fmts= (const enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB48BE, PIX_FMT_NONE},
.long_name= NULL_IF_CONFIG_SMALL("PPM (Portable PixelMap) image"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB48BE, PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("PPM (Portable PixelMap) image"),
};
#endif
@@ -420,10 +425,10 @@ AVCodec ppm_encoder = {
sizeof(PNMContext),
common_init,
pnm_encode_frame,
.pix_fmts= (const enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB48BE, PIX_FMT_NONE},
.long_name= NULL_IF_CONFIG_SMALL("PPM (Portable PixelMap) image"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB48BE, PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("PPM (Portable PixelMap) image"),
};
#endif // CONFIG_PPM_ENCODER
#endif
#if CONFIG_PBM_DECODER
AVCodec pbm_decoder = {
@@ -436,8 +441,8 @@ AVCodec pbm_decoder = {
common_end,
pnm_decode_frame,
CODEC_CAP_DR1,
.pix_fmts= (const enum PixelFormat[]){PIX_FMT_MONOWHITE, PIX_FMT_NONE},
.long_name= NULL_IF_CONFIG_SMALL("PBM (Portable BitMap) image"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_MONOWHITE, PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("PBM (Portable BitMap) image"),
};
#endif
@@ -449,10 +454,10 @@ AVCodec pbm_encoder = {
sizeof(PNMContext),
common_init,
pnm_encode_frame,
.pix_fmts= (const enum PixelFormat[]){PIX_FMT_MONOWHITE, PIX_FMT_NONE},
.long_name= NULL_IF_CONFIG_SMALL("PBM (Portable BitMap) image"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_MONOWHITE, PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("PBM (Portable BitMap) image"),
};
#endif // CONFIG_PBM_ENCODER
#endif
#if CONFIG_PAM_DECODER
AVCodec pam_decoder = {
@@ -465,8 +470,8 @@ AVCodec pam_decoder = {
common_end,
pnm_decode_frame,
CODEC_CAP_DR1,
.pix_fmts= (const enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB32, PIX_FMT_GRAY8, PIX_FMT_MONOWHITE, PIX_FMT_NONE},
.long_name= NULL_IF_CONFIG_SMALL("PAM (Portable AnyMap) image"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB32, PIX_FMT_GRAY8, PIX_FMT_MONOWHITE, PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("PAM (Portable AnyMap) image"),
};
#endif
@@ -478,7 +483,7 @@ AVCodec pam_encoder = {
sizeof(PNMContext),
common_init,
pam_encode_frame,
.pix_fmts= (const enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB32, PIX_FMT_GRAY8, PIX_FMT_MONOWHITE, PIX_FMT_NONE},
.long_name= NULL_IF_CONFIG_SMALL("PAM (Portable AnyMap) image"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB32, PIX_FMT_GRAY8, PIX_FMT_MONOWHITE, PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("PAM (Portable AnyMap) image"),
};
#endif // CONFIG_PAM_ENCODER
#endif