mirror of
https://github.com/FFmpeg/FFmpeg.git
synced 2024-11-26 19:01:44 +02:00
4972b26f24
Originally committed as revision 148 to svn://svn.ffmpeg.org/ffmpeg/trunk
302 lines
7.8 KiB
C
302 lines
7.8 KiB
C
/*
|
|
* Linux video grab interface
|
|
* Copyright (c) 2000,2001 Gerard Lantau.
|
|
*
|
|
* This program is free software; you can redistribute it and/or modify
|
|
* it under the terms of the GNU General Public License as published by
|
|
* the Free Software Foundation; either version 2 of the License, or
|
|
* (at your option) any later version.
|
|
*
|
|
* This program is distributed in the hope that it will be useful,
|
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
* GNU General Public License for more details.
|
|
*
|
|
* You should have received a copy of the GNU General Public License
|
|
* along with this program; if not, write to the Free Software
|
|
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
|
|
*/
|
|
#include "avformat.h"
|
|
#include <linux/videodev.h>
|
|
#include <unistd.h>
|
|
#include <fcntl.h>
|
|
#include <sys/ioctl.h>
|
|
#include <sys/mman.h>
|
|
#include <sys/time.h>
|
|
|
|
|
|
/* Per-stream private state for the v4l grab device. */
typedef struct {
    int fd;               /* file descriptor of the opened video device */
    int frame_format;     /* see VIDEO_PALETTE_xxx */
    int use_mmap;         /* nonzero: mmap/VIDIOCMCAPTURE capture; zero: read() capture */
    int width, height;    /* capture size in pixels */
    int frame_rate;       /* requested rate, in FRAME_RATE_BASE units (see grab_read_packet) */
    INT64 time_frame;     /* due time of the next frame, same clock/units as gettime() */
    int frame_size;       /* size in bytes of one raw frame for frame_format */
} VideoData;
|
|
|
|
/* Path of the capture device to open. */
const char *v4l_device = "/dev/video";

/* XXX: move all that to the context */

static struct video_capability video_cap;   /* device capabilities (VIDIOCGCAP) */
static UINT8 *video_buf;                    /* mmap()ed capture buffer area */
static struct video_mbuf gb_buffers;        /* mmap buffer layout (VIDIOCGMBUF) */
static struct video_mmap gb_buf;            /* parameters for VIDIOCMCAPTURE */
static struct video_audio audio, audio_saved; /* current and original audio settings */
static int gb_frame = 0;                    /* index of the next mmap frame to queue */
|
/*
 * Open and configure the v4l capture device and create the single raw
 * video stream.  'ap' must supply a positive width, height and frame rate.
 * Tries mmap-based capture first (VIDIOCGMBUF), falling back to read()
 * based capture; in both modes the palette is negotiated in the order
 * YUV420P, YUV422, RGB24.  Returns 0 on success, a negative error code
 * on failure.
 */
static int grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    VideoData *s;
    AVStream *st;
    int width, height;
    int video_fd, frame_size;
    int ret, frame_rate;

    /* the caller must provide the capture geometry and rate */
    if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
        return -1;

    width = ap->width;
    height = ap->height;
    frame_rate = ap->frame_rate;

    s = av_mallocz(sizeof(VideoData));
    if (!s)
        return -ENOMEM;
    st = av_mallocz(sizeof(AVStream));
    if (!st) {
        free(s);
        return -ENOMEM;
    }
    s1->priv_data = s;
    s1->nb_streams = 1;
    s1->streams[0] = st;

    s->width = width;
    s->height = height;
    s->frame_rate = frame_rate;

    video_fd = open(v4l_device, O_RDWR);
    if (video_fd < 0) {
        perror(v4l_device);
        goto fail;
    }

    if (ioctl(video_fd,VIDIOCGCAP,&video_cap) < 0) {
        perror("VIDIOCGCAP");
        goto fail;
    }

    if (!(video_cap.type & VID_TYPE_CAPTURE)) {
        fprintf(stderr, "Fatal: grab device does not handle capture\n");
        goto fail;
    }

    /* unmute audio */
    ioctl(video_fd, VIDIOCGAUDIO, &audio);
    /* remember the original settings so grab_read_close() can restore them */
    memcpy(&audio_saved, &audio, sizeof(audio));
    audio.flags &= ~VIDEO_AUDIO_MUTE;
    ioctl(video_fd, VIDIOCSAUDIO, &audio);

    ret = ioctl(video_fd,VIDIOCGMBUF,&gb_buffers);
    if (ret < 0) {
        /* try to use read based access */
        struct video_window win;
        struct video_picture pict;
        int val;

        win.x = 0;
        win.y = 0;
        win.width = width;
        win.height = height;
        win.chromakey = -1;
        win.flags = 0;

        ioctl(video_fd, VIDIOCSWIN, &win);

        ioctl(video_fd, VIDIOCGPICT, &pict);
#if 0
        printf("v4l: colour=%d hue=%d brightness=%d constrast=%d whiteness=%d\n",
               pict.colour,
               pict.hue,
               pict.brightness,
               pict.contrast,
               pict.whiteness);
#endif
        /* try to choose a suitable video format */
        pict.palette=VIDEO_PALETTE_YUV420P;
        ret = ioctl(video_fd, VIDIOCSPICT, &pict);
        if (ret < 0) {
            pict.palette=VIDEO_PALETTE_YUV422;
            ret = ioctl(video_fd, VIDIOCSPICT, &pict);
            if (ret < 0) {
                pict.palette=VIDEO_PALETTE_RGB24;
                ret = ioctl(video_fd, VIDIOCSPICT, &pict);
                if (ret < 0)
                    goto fail1;
            }
        }

        s->frame_format = pict.palette;

        /* start capturing */
        val = 1;
        ioctl(video_fd, VIDIOCCAPTURE, &val);

        s->time_frame = gettime();
        s->use_mmap = 0;
    } else {
        /* mmap based capture: map all the driver's frame buffers at once */
        video_buf = mmap(0,gb_buffers.size,PROT_READ|PROT_WRITE,MAP_SHARED,video_fd,0);
        if ((unsigned char*)-1 == video_buf) {
            perror("mmap");
            goto fail;
        }
        gb_frame = 0;
        s->time_frame = gettime();

        /* start to grab the first frame */
        gb_buf.frame = (gb_frame + 1) % gb_buffers.frames;
        gb_buf.height = height;
        gb_buf.width = width;
        gb_buf.format = VIDEO_PALETTE_YUV420P;

        /* same palette fallback order as the read() path: 420P, 422, RGB24.
           EAGAIN means "busy, try again", not an unsupported format */
        ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
        if (ret < 0 && errno != EAGAIN) {
            /* try YUV422 */
            gb_buf.format = VIDEO_PALETTE_YUV422;

            ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
            if (ret < 0 && errno != EAGAIN) {
                /* try RGB24 */
                gb_buf.format = VIDEO_PALETTE_RGB24;
                ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
            }
        }
        if (ret < 0) {
            if (errno != EAGAIN) {
fail1:
                fprintf(stderr, "Fatal: grab device does not support suitable format\n");
            } else {
                fprintf(stderr,"Fatal: grab device does not receive any video signal\n");
            }
            goto fail;
        }
        s->frame_format = gb_buf.format;
        s->use_mmap = 1;
    }

    /* derive the raw frame size and ffmpeg pixel format from the palette */
    switch(s->frame_format) {
    case VIDEO_PALETTE_YUV420P:
        frame_size = (width * height * 3) / 2;
        st->codec.pix_fmt = PIX_FMT_YUV420P;
        break;
    case VIDEO_PALETTE_YUV422:
        frame_size = width * height * 2;
        st->codec.pix_fmt = PIX_FMT_YUV422;
        break;
    case VIDEO_PALETTE_RGB24:
        frame_size = width * height * 3;
        st->codec.pix_fmt = PIX_FMT_BGR24; /* NOTE: v4l uses BGR24, not RGB24 ! */
        break;
    default:
        goto fail;
    }
    s->fd = video_fd;
    s->frame_size = frame_size;

    st->codec.codec_id = CODEC_ID_RAWVIDEO;
    st->codec.width = width;
    st->codec.height = height;
    st->codec.frame_rate = frame_rate;

    return 0;
 fail:
    /* NOTE(review): s1->priv_data and s1->streams[0] still point at the
       freed s/st after this path — the caller must not reuse them */
    if (video_fd >= 0)
        close(video_fd);
    free(st);
    free(s);
    return -EIO;
}
|
|
|
|
/*
 * mmap-based capture of one frame: queue a new capture into the buffer
 * that just became free, then wait for the previously queued capture
 * (started by the last call, or by grab_read_header for the very first
 * frame) and copy it into 'buf'.  Returns the frame size in bytes, or
 * -EIO on capture failure.
 */
static int v4l_mm_read_picture(VideoData *s, UINT8 *buf)
{
    UINT8 *ptr;

    /* start grabbing into the current slot */
    gb_buf.frame = gb_frame;
    if (ioctl(s->fd, VIDIOCMCAPTURE, &gb_buf) < 0) {
        if (errno == EAGAIN)
            fprintf(stderr,"Cannot Sync\n");
        else
            perror("VIDIOCMCAPTURE");
        return -EIO;
    }
    /* advance to the slot whose capture was queued on the previous call */
    gb_frame = (gb_frame + 1) % gb_buffers.frames;

    /* wait for that capture to complete, retrying on EAGAIN/EINTR */
    while (ioctl(s->fd, VIDIOCSYNC, &gb_frame) < 0 &&
           (errno == EAGAIN || errno == EINTR));

    /* copy the completed frame out of the mmap()ed buffer area */
    ptr = video_buf + gb_buffers.offsets[gb_frame];
    memcpy(buf, ptr, s->frame_size);
    return s->frame_size;
}
|
|
|
|
/*
 * Deliver one raw video frame as a packet, pacing delivery to the
 * configured frame rate by sleeping until the frame's due time.
 * Returns the frame size in bytes, or -EIO on error.
 */
static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    VideoData *s = s1->priv_data;
    INT64 remaining;
    struct timespec ts;

    /* advance the due time of this frame by one frame period */
    s->time_frame += (INT64_C(1000000) * FRAME_RATE_BASE) / s->frame_rate;

    /* sleep until the due time is reached (re-check after each wakeup) */
    while ((remaining = s->time_frame - gettime()) > 0) {
        ts.tv_sec = remaining / 1000000;
        ts.tv_nsec = (remaining % 1000000) * 1000;
        nanosleep(&ts, NULL);
    }

    if (av_new_packet(pkt, s->frame_size) < 0)
        return -EIO;

    /* read one frame, via whichever capture mode was set up at open time */
    if (s->use_mmap)
        return v4l_mm_read_picture(s, pkt->data);

    /* read() based capture */
    if (read(s->fd, pkt->data, pkt->size) != pkt->size)
        return -EIO;
    return s->frame_size;
}
|
|
|
|
/*
 * Close the capture device: restore the audio settings saved at open
 * time, close the file descriptor and free the private state.
 */
static int grab_read_close(AVFormatContext *s1)
{
    VideoData *vd = s1->priv_data;

    /* put the audio settings back the way grab_read_header() found them */
    ioctl(vd->fd, VIDIOCSAUDIO, &audio_saved);

    close(vd->fd);
    free(vd);
    return 0;
}
|
|
|
|
/* Format descriptor registering the grab device with libavformat.
   Positional initializer — slot comments below reflect the apparent
   AVFormat layout of this era; confirm against avformat.h. */
AVFormat video_grab_device_format = {
    "video_grab_device",  /* name */
    "video grab",         /* long name */
    "",                   /* mime type: none */
    "",                   /* extensions: none */
    CODEC_ID_NONE,        /* default audio codec */
    CODEC_ID_NONE,        /* default video codec */
    NULL,                 /* no write (mux) support */
    NULL,
    NULL,

    grab_read_header,     /* read_header */
    grab_read_packet,     /* read_packet */
    grab_read_close,      /* read_close */
    NULL,                 /* no seeking on a live device */
    AVFMT_NOFILE,         /* device, not a file: no I/O context needed */
};
|