
v4l2: add libv4l2 support.

Clément Bœsch 2011-10-30 03:21:55 +01:00
parent 434db5719e
commit 1054ab3595
4 changed files with 61 additions and 27 deletions

Changelog

@@ -75,6 +75,7 @@ easier to use. The changes are:
- new ffmpeg option: -map_channel
- volume audio filter added
- earwax audio filter added
- libv4l2 support (--enable-libv4l2)
version 0.8:

configure

@@ -187,6 +187,7 @@ External library support:
--enable-libstagefright-h264 enable H.264 decoding via libstagefright [no]
--enable-libtheora enable Theora encoding via libtheora [no]
--enable-libutvideo enable Ut Video decoding via libutvideo [no]
--enable-libv4l2 enable libv4l2/v4l-utils [no]
--enable-libvo-aacenc enable AAC encoding via libvo-aacenc [no]
--enable-libvo-amrwbenc enable AMR-WB encoding via libvo-amrwbenc [no]
--enable-libvorbis enable Vorbis encoding via libvorbis,
@@ -1022,6 +1023,7 @@ CONFIG_LIST="
libstagefright_h264
libtheora
libutvideo
libv4l2
libvo_aacenc
libvo_amrwbenc
libvorbis
@@ -1568,6 +1570,7 @@ jack_indev_deps="jack_jack_h sem_timedwait"
lavfi_indev_deps="avfilter"
libcdio_indev_deps="libcdio"
libdc1394_indev_deps="libdc1394"
libv4l2_indev_deps="libv4l2"
openal_indev_deps="openal"
oss_indev_deps_any="soundcard_h sys_soundcard_h"
oss_outdev_deps_any="soundcard_h sys_soundcard_h"
@@ -3055,6 +3058,7 @@ enabled libstagefright_h264 && require_cpp libstagefright_h264 "binder/ProcessS
media/stagefright/OMXClient.h media/stagefright/OMXCodec.h" android::OMXClient -lstagefright -lmedia -lutils -lbinder
enabled libtheora && require libtheora theora/theoraenc.h th_info_init -ltheoraenc -ltheoradec -logg
enabled libutvideo && require_cpp utvideo "stdint.h stdlib.h utvideo/utvideo.h utvideo/Codec.h" 'CCodec*' -lutvideo -lstdc++
enabled libv4l2 && require_pkg_config libv4l2 libv4l2.h v4l2_ioctl
enabled libvo_aacenc && require libvo_aacenc vo-aacenc/voAAC.h voGetAACEncAPI -lvo-aacenc
enabled libvo_amrwbenc && require libvo_amrwbenc vo-amrwbenc/enc_if.h E_IF_init -lvo-amrwbenc
enabled libvorbis && require libvorbis vorbis/vorbisenc.h vorbis_info_init -lvorbisenc -lvorbis -logg
@@ -3383,6 +3387,7 @@ echo "libspeex enabled ${libspeex-no}"
echo "libstagefright-h264 enabled ${libstagefright_h264-no}"
echo "libtheora enabled ${libtheora-no}"
echo "libutvideo enabled ${libutvideo-no}"
echo "libv4l2 enabled ${libv4l2-no}"
echo "libvo-aacenc support ${libvo_aacenc-no}"
echo "libvo-amrwbenc support ${libvo_amrwbenc-no}"
echo "libvorbis enabled ${libvorbis-no}"

doc/indevs.texi

@@ -513,6 +513,9 @@ input device will use the frame rate value already set in the driver.
Video4Linux support is deprecated since Linux 2.6.30, and will be
dropped in later versions.
Note that if FFmpeg is built with v4l-utils support ("--enable-libv4l2"
option), it will always be used.
Some usage examples of the video4linux devices with the ff* tools
follow.
@example

libavdevice/v4l2.c

@@ -52,6 +52,18 @@
#include "libavutil/pixdesc.h"
#include "libavutil/avstring.h"
#if CONFIG_LIBV4L2
#include <libv4l2.h>
#else
#define v4l2_open open
#define v4l2_close close
#define v4l2_dup dup
#define v4l2_ioctl ioctl
#define v4l2_read read
#define v4l2_mmap mmap
#define v4l2_munmap munmap
#endif
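/* With these fallbacks the rest of the file can call the v4l2_*() names
 * unconditionally: without --enable-libv4l2 they resolve to the plain
 * libc/kernel calls, while with it they go through libv4l2's wrappers,
 * which can transparently convert driver-specific pixel formats in
 * userspace. */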
static const int desired_video_buffers = 256;
enum io_method {
@@ -113,36 +125,49 @@ static int device_open(AVFormatContext *ctx, uint32_t *capabilities)
{
struct v4l2_capability cap;
int fd;
#if CONFIG_LIBV4L2
int fd_libv4l;
#endif
int res, err;
int flags = O_RDWR;
if (ctx->flags & AVFMT_FLAG_NONBLOCK) {
flags |= O_NONBLOCK;
}
fd = open(ctx->filename, flags, 0);
fd = v4l2_open(ctx->filename, flags, 0);
if (fd < 0) {
av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s : %s\n",
ctx->filename, strerror(errno));
return AVERROR(errno);
}
#if CONFIG_LIBV4L2
fd_libv4l = v4l2_fd_open(fd, 0);
if (fd_libv4l < 0) {
err = AVERROR(errno);
av_log(ctx, AV_LOG_ERROR, "Cannot open video device with libv4l neither %s : %s\n",
ctx->filename, strerror(errno));
return err;
}
fd = fd_libv4l;
#endif
res = ioctl(fd, VIDIOC_QUERYCAP, &cap);
res = v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap);
// the ENOIOCTLCMD definition is only available when __KERNEL__ is defined
if (res < 0 && ((err = errno) == 515)) {
av_log(ctx, AV_LOG_ERROR, "QUERYCAP not implemented, probably V4L device but not supporting V4L2\n");
close(fd);
v4l2_close(fd);
return AVERROR(515);
}
if (res < 0) {
av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
strerror(errno));
close(fd);
v4l2_close(fd);
return AVERROR(err);
}
if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
av_log(ctx, AV_LOG_ERROR, "Not a video capture device\n");
close(fd);
v4l2_close(fd);
return AVERROR(ENODEV);
}
*capabilities = cap.capabilities;
@@ -162,7 +187,7 @@ static int device_init(AVFormatContext *ctx, int *width, int *height, uint32_t p
fmt.fmt.pix.height = *height;
fmt.fmt.pix.pixelformat = pix_fmt;
fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
res = ioctl(fd, VIDIOC_S_FMT, &fmt);
res = v4l2_ioctl(fd, VIDIOC_S_FMT, &fmt);
if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
av_log(ctx, AV_LOG_INFO, "The V4L2 driver changed the video from %dx%d to %dx%d\n", *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
*width = fmt.fmt.pix.width;
@@ -182,7 +207,7 @@ static int first_field(int fd)
int res;
v4l2_std_id std;
res = ioctl(fd, VIDIOC_G_STD, &std);
res = v4l2_ioctl(fd, VIDIOC_G_STD, &std);
if (res < 0) {
return 0;
}
@@ -245,7 +270,7 @@ static int mmap_init(AVFormatContext *ctx)
req.count = desired_video_buffers;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
res = ioctl(s->fd, VIDIOC_REQBUFS, &req);
res = v4l2_ioctl(s->fd, VIDIOC_REQBUFS, &req);
if (res < 0) {
if (errno == EINVAL) {
av_log(ctx, AV_LOG_ERROR, "Device does not support mmap\n");
@@ -278,7 +303,7 @@ static int mmap_init(AVFormatContext *ctx)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
res = ioctl(s->fd, VIDIOC_QUERYBUF, &buf);
res = v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf);
if (res < 0) {
av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF)\n");
return AVERROR(errno);
@@ -290,7 +315,7 @@ static int mmap_init(AVFormatContext *ctx)
return -1;
}
s->buf_start[i] = mmap (NULL, buf.length,
s->buf_start[i] = v4l2_mmap(NULL, buf.length,
PROT_READ | PROT_WRITE, MAP_SHARED, s->fd, buf.m.offset);
if (s->buf_start[i] == MAP_FAILED) {
av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", strerror(errno));
@@ -322,7 +347,7 @@ static void mmap_release_buffer(AVPacket *pkt)
fd = buf_descriptor->fd;
av_free(buf_descriptor);
res = ioctl(fd, VIDIOC_QBUF, &buf);
res = v4l2_ioctl(fd, VIDIOC_QBUF, &buf);
if (res < 0) {
av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", strerror(errno));
}
@@ -341,7 +366,7 @@ static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
buf.memory = V4L2_MEMORY_MMAP;
/* FIXME: Some special treatment might be needed in case of loss of signal... */
while ((res = ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
while ((res = v4l2_ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
if (res < 0) {
if (errno == EAGAIN) {
pkt->size = 0;
@@ -368,7 +393,7 @@ static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
* allocate a buffer for memcopying into it
*/
av_log(ctx, AV_LOG_ERROR, "Failed to allocate a buffer descriptor\n");
res = ioctl(s->fd, VIDIOC_QBUF, &buf);
res = v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf);
return AVERROR(ENOMEM);
}
@@ -397,7 +422,7 @@ static int mmap_start(AVFormatContext *ctx)
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
res = ioctl(s->fd, VIDIOC_QBUF, &buf);
res = v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf);
if (res < 0) {
av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", strerror(errno));
return AVERROR(errno);
@@ -405,7 +430,7 @@ static int mmap_start(AVFormatContext *ctx)
}
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
res = ioctl(s->fd, VIDIOC_STREAMON, &type);
res = v4l2_ioctl(s->fd, VIDIOC_STREAMON, &type);
if (res < 0) {
av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n", strerror(errno));
return AVERROR(errno);
@@ -423,9 +448,9 @@ static void mmap_close(struct video_data *s)
/* We do not check for the result, because we could
* not do anything about it anyway...
*/
ioctl(s->fd, VIDIOC_STREAMOFF, &type);
v4l2_ioctl(s->fd, VIDIOC_STREAMOFF, &type);
for (i = 0; i < s->buffers; i++) {
munmap(s->buf_start[i], s->buf_len[i]);
v4l2_munmap(s->buf_start[i], s->buf_len[i]);
}
av_free(s->buf_start);
av_free(s->buf_len);
@@ -450,14 +475,14 @@ static int v4l2_set_parameters(AVFormatContext *s1, AVFormatParameters *ap)
/* set tv video input */
input.index = s->channel;
if (ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) {
if (v4l2_ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) {
av_log(s1, AV_LOG_ERROR, "The V4L2 driver ioctl enum input failed:\n");
return AVERROR(EIO);
}
av_log(s1, AV_LOG_DEBUG, "The V4L2 driver set input_id: %d, input: %s\n",
s->channel, input.name);
if (ioctl(s->fd, VIDIOC_S_INPUT, &input.index) < 0) {
if (v4l2_ioctl(s->fd, VIDIOC_S_INPUT, &input.index) < 0) {
av_log(s1, AV_LOG_ERROR, "The V4L2 driver ioctl set input(%d) failed\n",
s->channel);
return AVERROR(EIO);
@@ -469,7 +494,7 @@ static int v4l2_set_parameters(AVFormatContext *s1, AVFormatParameters *ap)
/* set tv standard */
for (i = 0;; i++) {
standard.index = i;
ret = ioctl(s->fd, VIDIOC_ENUMSTD, &standard);
ret = v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard);
if (ret < 0 || !av_strcasecmp(standard.name, s->standard))
break;
}
@@ -480,7 +505,7 @@ static int v4l2_set_parameters(AVFormatContext *s1, AVFormatParameters *ap)
av_log(s1, AV_LOG_DEBUG, "The V4L2 driver set standard: %s, id: %"PRIu64"\n",
s->standard, (uint64_t)standard.id);
if (ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
if (v4l2_ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
av_log(s1, AV_LOG_ERROR, "The V4L2 driver ioctl set standard(%s) failed\n",
s->standard);
return AVERROR(EIO);
@@ -492,7 +517,7 @@ static int v4l2_set_parameters(AVFormatContext *s1, AVFormatParameters *ap)
framerate_q.den, framerate_q.num);
tpf->numerator = framerate_q.den;
tpf->denominator = framerate_q.num;
if (ioctl(s->fd, VIDIOC_S_PARM, &streamparm) != 0) {
if (v4l2_ioctl(s->fd, VIDIOC_S_PARM, &streamparm) != 0) {
av_log(s1, AV_LOG_ERROR,
"ioctl set time per frame(%d/%d) failed\n",
framerate_q.den, framerate_q.num);
@@ -508,7 +533,7 @@ static int v4l2_set_parameters(AVFormatContext *s1, AVFormatParameters *ap)
}
} else {
/* if timebase value is not set, read the timebase value from the driver */
if (ioctl(s->fd, VIDIOC_G_PARM, &streamparm) != 0) {
if (v4l2_ioctl(s->fd, VIDIOC_G_PARM, &streamparm) != 0) {
av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_PARM): %s\n", strerror(errno));
return AVERROR(errno);
}
@@ -590,7 +615,7 @@ static int v4l2_read_header(AVFormatContext *s1, AVFormatParameters *ap)
av_log(s1, AV_LOG_VERBOSE, "Querying the device for the current frame size\n");
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) {
if (v4l2_ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) {
av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n", strerror(errno));
res = AVERROR(errno);
goto out;
@@ -604,7 +629,7 @@ static int v4l2_read_header(AVFormatContext *s1, AVFormatParameters *ap)
if (desired_format == 0) {
av_log(s1, AV_LOG_ERROR, "Cannot find a proper format for "
"codec_id %d, pix_fmt %d.\n", s1->video_codec_id, pix_fmt);
close(s->fd);
v4l2_close(s->fd);
res = AVERROR(EIO);
goto out;
@@ -629,7 +654,7 @@ static int v4l2_read_header(AVFormatContext *s1, AVFormatParameters *ap)
res = read_init(s1);
}
if (res < 0) {
close(s->fd);
v4l2_close(s->fd);
res = AVERROR(EIO);
goto out;
}
@@ -681,7 +706,7 @@ static int v4l2_read_close(AVFormatContext *s1)
mmap_close(s);
}
close(s->fd);
v4l2_close(s->fd);
return 0;
}
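
For reference, here is a minimal standalone sketch of the pattern this patch adopts. It is a hypothetical test program, not part of the commit: the device path, the CONFIG_LIBV4L2 compile-time switch and the variable names are illustrative placeholders. Build with -DCONFIG_LIBV4L2=1 -lv4l2 to go through libv4l2, or without either to exercise the fallback aliases.

#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

#if CONFIG_LIBV4L2
#include <libv4l2.h>
#else
/* same fallback trick as the patch: alias the wrappers to the plain calls */
#define v4l2_fd_open(fd, flags) (fd)
#define v4l2_ioctl ioctl
#define v4l2_close close
#endif

int main(void)
{
    struct v4l2_capability cap;
    int fd = open("/dev/video0", O_RDWR); /* placeholder device path */

    if (fd < 0) {
        fprintf(stderr, "open: %s\n", strerror(errno));
        return 1;
    }

    /* v4l2_fd_open() layers libv4l2's format conversion on an existing fd;
     * keep the original fd if the wrapping fails */
    int v4l2_fd = v4l2_fd_open(fd, 0);
    if (v4l2_fd >= 0)
        fd = v4l2_fd;

    if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
        printf("card: %s, capabilities: 0x%08x\n",
               (const char *)cap.card, cap.capabilities);
    else
        fprintf(stderr, "ioctl(VIDIOC_QUERYCAP): %s\n", strerror(errno));

    v4l2_close(fd);
    return 0;
}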