yuv4mpeg: Use the wrapped avframe pseudo-encoder
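With the wrapped_avframe pseudo-encoder, the muxer no longer receives raw pixel data in the packet payload: pkt->data carries a pointer to the AVFrame itself, so the planes are read through frame->data[] and frame->linesize[] instead of an AVPicture, and the muxer advertises AV_CODEC_ID_WRAPPED_AVFRAME rather than AV_CODEC_ID_RAWVIDEO. A minimal sketch of that consumption pattern follows; write_plane and write_wrapped_frame are hypothetical helpers, not FFmpeg API, and error handling is omitted.

#include <libavformat/avformat.h>

/* Sketch only: shows how a muxer reads planes from a wrapped-AVFrame
 * packet. The packet payload is a pointer to the frame, so rows are
 * written straight from frame->data[] using frame->linesize[]. */
static void write_plane(AVIOContext *pb, uint8_t *ptr, int stride,
                        int width, int height)
{
    for (int i = 0; i < height; i++) {
        avio_write(pb, ptr, width); /* one row of samples */
        ptr += stride;              /* advance by the plane's stride */
    }
}

static int write_wrapped_frame(AVFormatContext *s, AVPacket *pkt)
{
    AVFrame *frame = (AVFrame *)pkt->data;       /* the wrapped frame */
    AVStream *st   = s->streams[pkt->stream_index];

    /* luma plane; chroma planes would follow, with width and height
     * reduced according to the pixel format's chroma shifts */
    write_plane(s->pb, frame->data[0], frame->linesize[0],
                st->codec->width, st->codec->height);
    return 0;
}
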
@@ -88,7 +88,7 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
 {
     AVStream *st = s->streams[pkt->stream_index];
     AVIOContext *pb = s->pb;
-    AVPicture *picture;
+    AVFrame *frame;
     int* first_pkt = s->priv_data;
     int width, height, h_chroma_shift, v_chroma_shift;
     int i;
@@ -96,7 +96,7 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
     char buf1[20];
     uint8_t *ptr, *ptr1, *ptr2;
 
-    picture = (AVPicture *)pkt->data;
+    frame = (AVFrame *)pkt->data;
 
     /* for the first packet we have to output the header as well */
     if (*first_pkt) {
@@ -118,10 +118,10 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
     width = st->codec->width;
     height = st->codec->height;
 
-    ptr = picture->data[0];
+    ptr = frame->data[0];
     for (i = 0; i < height; i++) {
         avio_write(pb, ptr, width);
-        ptr += picture->linesize[0];
+        ptr += frame->linesize[0];
     }
 
     if (st->codec->pix_fmt != AV_PIX_FMT_GRAY8) {
@@ -132,15 +132,15 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
         width = -(-width >> h_chroma_shift);
         height = -(-height >> v_chroma_shift);
 
-        ptr1 = picture->data[1];
-        ptr2 = picture->data[2];
+        ptr1 = frame->data[1];
+        ptr2 = frame->data[2];
         for (i = 0; i < height; i++) { /* Cb */
             avio_write(pb, ptr1, width);
-            ptr1 += picture->linesize[1];
+            ptr1 += frame->linesize[1];
         }
         for (i = 0; i < height; i++) { /* Cr */
             avio_write(pb, ptr2, width);
-            ptr2 += picture->linesize[2];
+            ptr2 += frame->linesize[2];
         }
     }
     return 0;
@@ -153,8 +153,8 @@ static int yuv4_write_header(AVFormatContext *s)
     if (s->nb_streams != 1)
         return AVERROR(EIO);
 
-    if (s->streams[0]->codec->codec_id != AV_CODEC_ID_RAWVIDEO) {
-        av_log(s, AV_LOG_ERROR, "ERROR: Only rawvideo supported.\n");
+    if (s->streams[0]->codec->codec_id != AV_CODEC_ID_WRAPPED_AVFRAME) {
+        av_log(s, AV_LOG_ERROR, "ERROR: Codec not supported.\n");
         return AVERROR_INVALIDDATA;
     }
 
@@ -182,8 +182,7 @@ AVOutputFormat ff_yuv4mpegpipe_muxer = {
     .extensions = "y4m",
     .priv_data_size = sizeof(int),
     .audio_codec = AV_CODEC_ID_NONE,
-    .video_codec = AV_CODEC_ID_RAWVIDEO,
+    .video_codec = AV_CODEC_ID_WRAPPED_AVFRAME,
     .write_header = yuv4_write_header,
     .write_packet = yuv4_write_packet,
-    .flags = AVFMT_RAWPICTURE,
 };
|