
avdevice/v4l2: add limited support for multiplanar API

This commit adds support for V4L2's multiplanar API, but only when the
number of planes is 1.

Adding full support for the multiplanar API would require a device that
actually uses more than 1 plane, which I have not found yet.
Author: Ramiro Polla
Date:   2024-06-20 17:40:39 +02:00
parent  cf7e2c5e05
commit  7eb37c142a
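
For context, the crux of the patch is where V4L2 keeps the mmap parameters: the classic capture API returns them directly in struct v4l2_buffer (buf.length, buf.m.offset), while the multiplanar API returns them per plane in buf.m.planes[] and reuses buf.length as the plane count. Below is a minimal sketch of that split, independent of FFmpeg, using only standard V4L2 ioctls; the helper names are hypothetical and error handling is mostly elided:

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Hypothetical helper: map device capabilities to a buffer type, mirroring
 * the capability check the patch adds to device_open(). */
static enum v4l2_buf_type pick_buf_type(const struct v4l2_capability *cap)
{
    if (cap->capabilities & V4L2_CAP_VIDEO_CAPTURE)
        return V4L2_BUF_TYPE_VIDEO_CAPTURE;
    return V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;  /* assume MPLANE otherwise */
}

/* Hypothetical helper: query one buffer and extract the mmap parameters
 * from whichever location the API variant uses. */
static int query_buffer(int fd, enum v4l2_buf_type type, unsigned int index,
                        unsigned int *length, unsigned int *offset)
{
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    struct v4l2_buffer buf = {
        .type   = type,
        .index  = index,
        .memory = V4L2_MEMORY_MMAP,
    };

    if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
        memset(planes, 0, sizeof(planes));
        buf.m.planes = planes;            /* kernel fills this array */
        buf.length   = VIDEO_MAX_PLANES;  /* capacity of the array */
    }
    if (ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0)
        return -1;

    if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
        /* buf.length now holds the plane count; the patch only accepts 1 */
        *length = buf.m.planes[0].length;
        *offset = buf.m.planes[0].m.mem_offset;
    } else {
        *length = buf.length;
        *offset = buf.m.offset;
    }
    return 0;
}

The patch follows the same pattern inside v4l2.c, but rejects devices that report more than one plane with AVERROR_PATCHWELCOME, since no such hardware was available for testing.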

libavdevice/v4l2.c

@@ -92,6 +92,9 @@ struct video_data {
     TimeFilter *timefilter;
     int64_t last_time_m;
 
+    int multiplanar;
+    enum v4l2_buf_type buf_type;
+
     int buffers;
     atomic_int buffers_queued;
     void **buf_start;
@@ -182,7 +185,13 @@ static int device_open(AVFormatContext *ctx, const char* device_path)
     av_log(ctx, AV_LOG_VERBOSE, "fd:%d capabilities:%x\n",
            fd, cap.capabilities);
 
-    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
+    if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
+        s->multiplanar = 0;
+        s->buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    } else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
+        s->multiplanar = 1;
+        s->buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    } else {
         av_log(ctx, AV_LOG_ERROR, "Not a video capture device.\n");
         err = AVERROR(ENODEV);
         goto fail;
@@ -206,7 +215,7 @@ static int device_init(AVFormatContext *ctx, int *width, int *height,
                        uint32_t pixelformat)
 {
     struct video_data *s = ctx->priv_data;
-    struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
+    struct v4l2_format fmt = { .type = s->buf_type };
     int res = 0;
 
     fmt.fmt.pix.width = *width;
@@ -288,7 +297,7 @@ static void list_framesizes(AVFormatContext *ctx, uint32_t pixelformat)
 static void list_formats(AVFormatContext *ctx, int type)
 {
     const struct video_data *s = ctx->priv_data;
-    struct v4l2_fmtdesc vfd = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
+    struct v4l2_fmtdesc vfd = { .type = s->buf_type };
 
     while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FMT, &vfd)) {
         enum AVCodecID codec_id = ff_fmt_v4l2codec(vfd.pixelformat);
@@ -352,7 +361,7 @@ static int mmap_init(AVFormatContext *ctx)
     int i, res;
     struct video_data *s = ctx->priv_data;
     struct v4l2_requestbuffers req = {
-        .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
+        .type   = s->buf_type,
         .count  = desired_video_buffers,
        .memory = V4L2_MEMORY_MMAP
     };
@@ -381,10 +390,14 @@ static int mmap_init(AVFormatContext *ctx)
     }
 
     for (i = 0; i < req.count; i++) {
+        unsigned int buf_length, buf_offset;
+        struct v4l2_plane planes[VIDEO_MAX_PLANES];
         struct v4l2_buffer buf = {
-            .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
+            .type   = s->buf_type,
             .index  = i,
-            .memory = V4L2_MEMORY_MMAP
+            .memory = V4L2_MEMORY_MMAP,
+            .m.planes = s->multiplanar ? planes : NULL,
+            .length = s->multiplanar ? VIDEO_MAX_PLANES : 0,
         };
         if (v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf) < 0) {
             res = AVERROR(errno);
@@ -392,16 +405,28 @@ static int mmap_init(AVFormatContext *ctx)
             return res;
         }
 
-        s->buf_len[i] = buf.length;
+        if (s->multiplanar) {
+            if (buf.length != 1) {
+                av_log(ctx, AV_LOG_ERROR, "multiplanar only supported when buf.length == 1\n");
+                return AVERROR_PATCHWELCOME;
+            }
+            buf_length = buf.m.planes[0].length;
+            buf_offset = buf.m.planes[0].m.mem_offset;
+        } else {
+            buf_length = buf.length;
+            buf_offset = buf.m.offset;
+        }
+
+        s->buf_len[i] = buf_length;
         if (s->frame_size > 0 && s->buf_len[i] < s->frame_size) {
             av_log(ctx, AV_LOG_ERROR,
                    "buf_len[%d] = %d < expected frame size %d\n",
                    i, s->buf_len[i], s->frame_size);
             return AVERROR(ENOMEM);
         }
-        s->buf_start[i] = v4l2_mmap(NULL, buf.length,
+        s->buf_start[i] = v4l2_mmap(NULL, buf_length,
                                     PROT_READ | PROT_WRITE, MAP_SHARED,
-                                    s->fd, buf.m.offset);
+                                    s->fd, buf_offset);
 
         if (s->buf_start[i] == MAP_FAILED) {
             res = AVERROR(errno);
@@ -429,13 +454,16 @@ static int enqueue_buffer(struct video_data *s, struct v4l2_buffer *buf)
 
 static void mmap_release_buffer(void *opaque, uint8_t *data)
 {
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
     struct v4l2_buffer buf = { 0 };
     struct buff_data *buf_descriptor = opaque;
     struct video_data *s = buf_descriptor->s;
 
-    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    buf.type = s->buf_type;
     buf.memory = V4L2_MEMORY_MMAP;
     buf.index = buf_descriptor->index;
+    buf.m.planes = s->multiplanar ? planes : NULL;
+    buf.length = s->multiplanar ? VIDEO_MAX_PLANES : 0;
     av_free(buf_descriptor);
 
     enqueue_buffer(s, &buf);
@@ -505,11 +533,15 @@ static int convert_timestamp(AVFormatContext *ctx, int64_t *ts)
 static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
 {
     struct video_data *s = ctx->priv_data;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
     struct v4l2_buffer buf = {
-        .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
-        .memory = V4L2_MEMORY_MMAP
+        .type   = s->buf_type,
+        .memory = V4L2_MEMORY_MMAP,
+        .m.planes = s->multiplanar ? planes : NULL,
+        .length = s->multiplanar ? VIDEO_MAX_PLANES : 0,
     };
     struct timeval buf_ts;
+    unsigned int bytesused;
     int res;
 
     pkt->size = 0;
@@ -536,38 +568,40 @@ static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
     // always keep at least one buffer queued
     av_assert0(atomic_load(&s->buffers_queued) >= 1);
 
+    bytesused = s->multiplanar ? buf.m.planes[0].bytesused : buf.bytesused;
+
 #ifdef V4L2_BUF_FLAG_ERROR
     if (buf.flags & V4L2_BUF_FLAG_ERROR) {
         av_log(ctx, AV_LOG_WARNING,
                "Dequeued v4l2 buffer contains corrupted data (%d bytes).\n",
-               buf.bytesused);
-        buf.bytesused = 0;
+               bytesused);
+        bytesused = 0;
     } else
 #endif
     {
         /* CPIA is a compressed format and we don't know the exact number of bytes
          * used by a frame, so set it here as the driver announces it. */
         if (ctx->video_codec_id == AV_CODEC_ID_CPIA)
-            s->frame_size = buf.bytesused;
+            s->frame_size = bytesused;
 
-        if (s->frame_size > 0 && buf.bytesused != s->frame_size) {
+        if (s->frame_size > 0 && bytesused != s->frame_size) {
             av_log(ctx, AV_LOG_WARNING,
                    "Dequeued v4l2 buffer contains %d bytes, but %d were expected. Flags: 0x%08X.\n",
-                   buf.bytesused, s->frame_size, buf.flags);
-            buf.bytesused = 0;
+                   bytesused, s->frame_size, buf.flags);
+            bytesused = 0;
         }
     }
 
     /* Image is at s->buff_start[buf.index] */
     if (atomic_load(&s->buffers_queued) == FFMAX(s->buffers / 8, 1)) {
         /* when we start getting low on queued buffers, fall back on copying data */
-        res = av_new_packet(pkt, buf.bytesused);
+        res = av_new_packet(pkt, bytesused);
         if (res < 0) {
             av_log(ctx, AV_LOG_ERROR, "Error allocating a packet.\n");
             enqueue_buffer(s, &buf);
             return res;
         }
-        memcpy(pkt->data, s->buf_start[buf.index], buf.bytesused);
+        memcpy(pkt->data, s->buf_start[buf.index], bytesused);
 
         res = enqueue_buffer(s, &buf);
         if (res) {
@@ -578,7 +612,7 @@ static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
         struct buff_data *buf_descriptor;
 
         pkt->data = s->buf_start[buf.index];
-        pkt->size = buf.bytesused;
+        pkt->size = bytesused;
 
         buf_descriptor = av_malloc(sizeof(struct buff_data));
         if (!buf_descriptor) {
@@ -615,10 +649,13 @@ static int mmap_start(AVFormatContext *ctx)
     int i, res;
 
     for (i = 0; i < s->buffers; i++) {
+        struct v4l2_plane planes[VIDEO_MAX_PLANES];
         struct v4l2_buffer buf = {
-            .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
+            .type   = s->buf_type,
             .index  = i,
-            .memory = V4L2_MEMORY_MMAP
+            .memory = V4L2_MEMORY_MMAP,
+            .m.planes = s->multiplanar ? planes : NULL,
+            .length = s->multiplanar ? VIDEO_MAX_PLANES : 0,
         };
 
         if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
@@ -630,7 +667,7 @@ static int mmap_start(AVFormatContext *ctx)
     }
 
     atomic_store(&s->buffers_queued, s->buffers);
-    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    type = s->buf_type;
     if (v4l2_ioctl(s->fd, VIDIOC_STREAMON, &type) < 0) {
         res = AVERROR(errno);
         av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n",
@@ -646,7 +683,7 @@ static void mmap_close(struct video_data *s)
     enum v4l2_buf_type type;
     int i;
 
-    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    type = s->buf_type;
     /* We do not check for the result, because we could
      * not do anything about it anyway...
      */
@@ -733,7 +770,7 @@ static int v4l2_set_parameters(AVFormatContext *ctx)
         tpf = &streamparm.parm.capture.timeperframe;
     }
 
-    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    streamparm.type = s->buf_type;
     if (v4l2_ioctl(s->fd, VIDIOC_G_PARM, &streamparm) < 0) {
         ret = AVERROR(errno);
         av_log(ctx, AV_LOG_WARNING, "ioctl(VIDIOC_G_PARM): %s\n", av_err2str(ret));
@@ -921,7 +958,7 @@ static int v4l2_read_header(AVFormatContext *ctx)
     }
 
     if (!s->width && !s->height) {
-        struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
+        struct v4l2_format fmt = { .type = s->buf_type };
 
         av_log(ctx, AV_LOG_VERBOSE,
                "Querying the device for the current frame size\n");