V4L2 全称 Video for Linux Two API Specification,它是Linux 内核中关于视频设备的子系统,它为linux 下的视频驱动提供了统一的接口,使得应用程序可以使用统一的API 函数操作不同的视频设备,极大地简化了视频系统的开发和维护。
由于早期的 V4L 有很多缺陷,Bill Dirks 等人对其进行了重新设计,并取名为Video for Linux Two,最早出现于Linux2.5.x 版本。V4L2 相比于V4L 有更好的扩展性和灵活性,并且支持的硬件设备更多。以下地址是几个非常全面的V4L2接口规范链接,文档详细讲解了V4L2相关知识和体系结构,地址如下:
(1)http://www.linuxtv.org/downloads/v4l-dvb-apis/index.html
(2)http://www.linuxtv.org/downloads/legacy/video4linux/API/V4L2_API/spec-single/v4l2.html
(3)http://v4l.videotechnology.com/dwg/v4l2.pdf
V4L2可以支持多种设备,它提供有以下几种接口:
(1)视频采集接口(video capture interface):这种应用的设备可以是高频头或者摄像头。V4L2的最初设计就是应用于这种功能的。
(2)视频输出接口(video output interface):可以驱动计算机的外围视频图像设备,可以输出视频到电视信号格式的设备。
(3) 直接传输视频接口(video overlay interface):它的主要工作是把从视频采集设备采集过来的信号直接输出到输出设备之上,而不用经过系统的CPU。
(4)视频间隔消隐信号接口(VBI interface):它可以使应用可以访问传输消隐期的视频信号。
(5)收音机接口(radio interface):可用来处理从AM或FM高频头设备接收来的音频流。
对V4L2 Device编程,通常包含以下步骤:
fd = open (dev_name, O_RDWR | O_NONBLOCK, 0);//打开设备
我更觉得应该是查询驱动能力,因为同一个USB摄像头,在电脑上与开发板上驱动输出的数据格式可能是不一样的。
int ioctl(int fd, int request, struct v4l2_capability *argp);
v4l2_capability 结构体定义如下:
struct v4l2_capability
{
__u8 driver[16]; // 驱动名
__u8 card[32]; // 设备名
__u8 bus_info[32]; // 设备在系统总线中的信息
__u32 version; // 驱动版本号
__u32 capabilities; // 设备支持的操作--我们最关心的属性
__u32 reserved[4];
};
capabilities支持的定义如下:
V4L2_CAP_VIDEO_CAPTURE 0x00000001 //The device supports the Video Capture interface.
V4L2_CAP_VIDEO_OUTPUT 0x00000002 //The device supports the Video Output interface.
V4L2_CAP_VIDEO_OVERLAY 0x00000004 //The device supports the Video Overlay interface. A video overlay device typically stores captured images directly in the video memory of a graphics card, with hardware clipping and scaling.
V4L2_CAP_VBI_CAPTURE 0x00000010 //The device supports the Raw VBI Capture interface, providing Teletext and Closed Caption data.
V4L2_CAP_VBI_OUTPUT 0x00000020 //The device supports the Raw VBI Output interface.
V4L2_CAP_SLICED_VBI_CAPTURE 0x00000040 //The device supports the Sliced VBI Capture interface.
V4L2_CAP_SLICED_VBI_OUTPUT 0x00000080 //The device supports the Sliced VBI Output interface.
V4L2_CAP_RDS_CAPTURE 0x00000100 //The device supports the RDS interface.
V4L2_CAP_VIDEO_OUTPUT_OVERLAY 0x00000200 //The device supports the Video Output Overlay (OSD) interface. Unlike the Video Overlay interface, this is a secondary function of video output devices and overlays an image onto an outgoing video signal. When the driver sets this flag, it must clear the V4L2_CAP_VIDEO_OVERLAY flag and vice versa.[a]
V4L2_CAP_HW_FREQ_SEEK 0x00000400 //The device supports the VIDIOC_S_HW_FREQ_SEEK ioctl for hardware frequency seeking.
V4L2_CAP_TUNER 0x00010000 //The device has some sort of tuner to receive RF-modulated video signals. For more information about tuner programming see Section 1.6, “Tuners and Modulators”.
V4L2_CAP_AUDIO 0x00020000 //The device has audio inputs or outputs. It may or may not support audio recording or playback, in PCM or compressed formats. PCM audio support must be implemented as ALSA or OSS interface. For more information on audio inputs and outputs see Section 1.5, “Audio Inputs and Outputs”.
V4L2_CAP_RADIO 0x00040000 //This is a radio receiver.
V4L2_CAP_MODULATOR 0x00080000 //The device has some sort of modulator to emit RF-modulated video/audio signals. For more information about modulator programming see Section 1.6, “Tuners and Modulators”.
V4L2_CAP_READWRITE 0x01000000 //The device supports the read() and/or write() I/O methods.
V4L2_CAP_ASYNCIO 0x02000000 //The device supports the asynchronous I/O methods.
V4L2_CAP_STREAMING 0x04000000 //The device supports the streaming I/O method.
其中我们最关心的是V4L2_CAP_VIDEO_CAPTURE 与 V4L2_CAP_STREAMING。
操作代码如下:
struct v4l2_capability cap;
if (-1 == xioctl (fd, VIDIOC_QUERYCAP, &cap)) {
if (EINVAL == errno) {
fprintf (stderr, "%s is no V4L2 device\n",dev_name);
exit (EXIT_FAILURE);
} else {
errno_exit ("VIDIOC_QUERYCAP");
}
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
fprintf (stderr, "%s is no video capture device\n",dev_name);
exit (EXIT_FAILURE);
}
if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
fprintf (stderr, "%s does not support streaming i/o\n",dev_name);
exit (EXIT_FAILURE);
}
//查询驱动支持的数据格式
struct v4l2_format format;
memset(&format,0,sizeof(format));
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl(fd,VIDIOC_G_FMT,&format))
{
perror("VIDIOC_G_FMT(VIDEO_CAPTURE)");
return -1;
}
//struct v4l2_format数据结构定义如下:
struct v4l2_format
{
enum v4l2_buf_type type;
union
{
struct v4l2_pix_format pix;
struct v4l2_window win;
struct v4l2_vbi_format vbi;
struct v4l2_sliced_vbi_format sliced;
__u8 raw_data[200];
} fmt;
};
//v4l2_buf_type type; 输入信息,让用户选择是哪种类型的设备。这里又与Driver中对应起来了。
enum v4l2_buf_type
{
V4L2_BUF_TYPE_VIDEO_CAPTURE
V4L2_BUF_TYPE_VIDEO_OUTPUT
V4L2_BUF_TYPE_VIDEO_OVERLAY
V4L2_BUF_TYPE_VBI_CAPTURE
V4L2_BUF_TYPE_VBI_OUTPUT
V4L2_BUF_TYPE_SLICED_VBI_CAPTURE
V4L2_BUF_TYPE_SLICED_VBI_OUTPUT
V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY
V4L2_BUF_TYPE_PRIVATE
}
//最重要的是struct v4l2_pix_format pix; 这个也是新手最容出错的地方!!
struct v4l2_pix_format
{
__u32 width; //帧宽度
__u32 height; //帧高度
__u32 pixelformat; //像素格式。例如:V4L2_PIX_FMT_YUYV,V4L2_PIX_FMT_RGB332等。
enum v4l2_field field; //image包含逐行数据还是隔行数据。
__u32 bytesperline; //每行多少字节,通过width,height,pixelformat可以算出
__u32 sizeimage; //每帧多少字节。也可以算出。但需要加入field信息才能算出
enum v4l2_colorspace colorspace;
__u32 priv;
};
pixelformat—像素格式有十几种之多,设备驱动不可能支持输出所有的数据格式,而我们要将摄像头中的图像数据正确的解析就必须首先搞明白我的视频驱动支持输出什么格式!在我的开发板与ubuntu系统下,同一个usb摄像头输出的格式对比如下:
arm开发板上支持的pixelformat:
ubuntu下支持的pixelformat:
在我开始研究摄像头初期,看到网上很多人将摄像头数据读出后直接写入文件就能使用看图软件查看图像,而我按照他们的程序实验了好久都不行!最后走了相当多的弯路才发现,原来一切原因出自这里—pixelformat!! 此处暂时不展开讨论pixelformat,等文章最后再展开详细说明,这里只要记住这个字段很重要即可。
通过格式查询我已经知道我的系统只支持YUYV4:2:2格式输出,所以,格式设置已经没什么好设置了,主要是设置图像宽度、高度信息等。
struct v4l2_format fmt;
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = 320;
fmt.fmt.pix.height = 240;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
if (-1 == xioctl (fd, VIDIOC_S_FMT, &fmt))
errno_exit ("VIDIOC_S_FMT");
V4L2支持两种方式来采集图像:内存映射方式(mmap)和直接读取方式(read)。前者一般用于连续视频数据的采集,后者常用于静态图片数据的采集。
下文主要讲解mmap方式,对于mmap方式,网友【东月之神】总结的很好,此处直接引用【东月之神】的博文。
mmap视频采集流程如下:
(1)申请若干视频采集的帧缓冲区,并将这些帧缓冲区从内核空间映射到用户空间,便于应用程序读取/处理视频数据;
(2)将申请到的帧缓冲区在视频采集输入队列排队,并启动视频采集;
(3)驱动开始视频数据的采集,应用程序从视频采集输出队列取出帧缓冲区,处理完后,将帧缓冲区重新放入视频采集输入队列,循环往复采集连续的视频数据;
循环流程如下:
操作源码如下:
struct v4l2_requestbuffers req;
struct buffer * buffers = NULL;
req.count = 4; //缓冲区数量
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;//设置为mmap方式
ioctl (fd, VIDIOC_REQBUFS, &req); //申请缓冲
if (req.count < 2)
printf("Insufficient buffer memory\n");
buffers = calloc (req.count, sizeof (*buffers));//内存中建立对应空间
for (n_buffers = 0; n_buffers < req.count; ++n_buffers)
{
struct v4l2_buffer buf; //驱动中的一帧
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if (-1 == ioctl (fd, VIDIOC_QUERYBUF, &buf)) //映射用户空间
printf ("VIDIOC_QUERYBUF error\n");
buffers[n_buffers].length = buf.length;
buffers[n_buffers].start =
mmap (NULL /* start anywhere */, //通过mmap建立映射关系
buf.length,
PROT_READ | PROT_WRITE /* required */,
MAP_SHARED /* recommended */,
fd, buf.m.offset);
if (MAP_FAILED == buffers[n_buffers].start)
printf ("mmap failed\n");
}
for (i = 0; i < n_buffers; ++i)
{
struct v4l2_buffer buf;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (-1 == ioctl (fd, VIDIOC_QBUF, &buf))//申请到的缓冲进入列队
printf ("VIDIOC_QBUF failed\n");
}
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
//开始捕捉图像数据
if (-1 == ioctl (fd, VIDIOC_STREAMON, &type))
printf ("VIDIOC_STREAMON failed\n");
数据帧格式转换需要明白颜色空间、图像格式等概念,祥见下文。
ioctl来操作总结:
int ioctl (int __fd,unsigned long int __request,...);
__request是V4L2一些ioctl命令,常见如下.
VIDIOC_REQBUFS:分配内存
VIDIOC_QUERYBUF:把VIDIOC_REQBUFS中分配的数据缓存转换成物理地址
VIDIOC_QUERYCAP:查询驱动功能
VIDIOC_ENUM_FMT:获取当前驱动支持的视频格式
VIDIOC_S_FMT:设置当前驱动的视频捕获格式
VIDIOC_G_FMT:读取当前驱动的视频捕获格式
VIDIOC_TRY_FMT:验证当前驱动的显示格式
VIDIOC_CROPCAP:查询驱动的修剪能力
VIDIOC_S_CROP:设置视频信号的边框
VIDIOC_G_CROP:读取视频信号的边框
VIDIOC_QBUF:把帧缓冲放入采集输入队列(入列)
VIDIOC_DQBUF:从采集输出队列取出已填充数据的帧缓冲(出列)
VIDIOC_STREAMON:开始视频显示函数
VIDIOC_STREAMOFF:结束视频显示函数
VIDIOC_QUERYSTD:检查当前视频设备支持的标准,例如PAL或NTSC。
完整的IOCTL命令参见http://v4l2spec.bytesex.org/spec/r7624.htm
要说明视频格式就要先引述一段比较高大上的概念:颜色空间。
推荐两篇文章:
1、《Camera图像处理原理及实例分析》
作者:刘旭晖 [email protected]
主页:http://rgbbones.googlepages.com/
2、《YUV / RGB 格式及快速转换算法》–该文不能明确找到出处,好像来自[jackyhwei]罗索实验室http://www.rosoo.net/a/201003/8943.html
关于颜色空间的简要说明如下:
颜色空间有许多种,常用有RGB,YUV、CMY,HSV、HSI等。
即使只是RGB、YUV这两大类色彩空间,所涉及到的知识也是十分丰富复杂的,自知不具备足够的相关专业知识,所以本文主要针对工程领域的应用及算法进行讨论。
我们在数字电子多媒体领域所谈到的YUV格式,实际上准确的说,是以YcrCb色彩空间模型为基础的具有多种存储格式的一类颜色模型的家族(包括YUV444 / YUV422 / YUV420 / YUV420P等等)。并不是传统意义上用于PAL制模拟电视的YUV模型。这些YUV模型的区别主要在于UV数据的采样方式和存储方式,这一点在V4L2文档中有详细描述。
有关YUV更详细的资料,可参考:http://www.fourcc.org/yuv.php
而在Camera Sensor中,最常用的YUV模型是 YUV422格式,因为它采用4个字节描述两个像素,能和RGB565模型比较好的兼容。有利于Camera Sensor和Camera controller的软硬件接口设计。
了解了颜色空间,接下来讨论读取到的一帧图像数据格式是什么?
还以我的开发板为例,arm开发板上摄像头的pixelformat如下:
YUV 4:2:2(YUYV)是什么意思?
YUV格式通常有两大类:打包(packed)格式和平面(planar)格式。前者将YUV分量存放在同一个数组中,通常是几个相邻的像素组成一个宏像素(macro-pixel);而后者使用三个数组分开存放YUV三个分量,就像是一个三维平面一样。下表中的YUY2到Y211都是打包格式,而IF09到YVU9都是平面格式。(注意:在介绍各种具体格式时,YUV各分量都会带有下标,如Y0、U0、V0表示第一个像素的YUV分量,Y1、U1、V1表示第二个像素的YUV分量,以此类推。)
YUY2(和YUYV)格式为每个像素保留Y分量,而UV分量在水平方向上每两个像素采样一次。一个宏像素为4个字节,实际表示2个像素。(4:2:2的意思为一个宏像素中有4个Y分量、2个U分量和2个V分量。)图像数据中YUV分量排列顺序如下:
Y0 U0 Y1 V0 Y2 U2 Y3 V2 …
其他的格式与此类似,如下:
YVYU格式跟YUY2类似,只是图像数据中YUV分量的排列顺序有所不同:
Y0 V0 Y1 U0 Y2 V2 Y3 U2 …
UYVY格式跟YUY2类似,只是图像数据中YUV分量的排列顺序有所不同:
U0 Y0 V0 Y1 U2 Y2 V2 Y3 …
AYUV格式带有一个Alpha通道,并且为每个像素都提取YUV分量,图像数据格式如下:
A0 Y0 U0 V0 A1 Y1 U1 V1 …
虽然已经采集到了视频图像数据,但是YUV格式的图像我们并不能够直接看到,并且我们的液晶屏都是RGB格式的,所以就需要将YUV格式数据转换成RGB格式后才能显示在液晶屏上。
这里指的YUV实际是YcrCb了,YUV2RGB的转换公式本身是很简单的,但是牵涉到浮点运算,所以,如果要实现快速算法,算法结构本身没什么好研究的了,主要是采用整型运算或者查表来加快计算速度。
首先可以推导得到转换公式为:
R = Y + 1.4075 *(V-128)
G = Y – 0.3455 (U –128) – 0.7169 (V –128)
B = Y + 1.779 *(U – 128)
且慢,莫要高兴,因为RGB也是有很多格式的!在DirectShow中,常见的RGB格式有RGB1、RGB4、RGB8、RGB565、RGB555、RGB24、RGB32、ARGB32等;下面列举常见的RGB与YUV格式说明。
常见的RGB和YUV格式:
GUID | 格式描述 |
---|---|
MEDIASUBTYPE_RGB1 | 2色,每个像素用1位表示,需要调色板 |
MEDIASUBTYPE_RGB4 | 16色,每个像素用4位表示,需要调色板 |
MEDIASUBTYPE_RGB8 | 256色,每个像素用8位表示,需要调色板 |
MEDIASUBTYPE_RGB565 | 每个像素用16位表示,RGB分量分别使 |
MEDIASUBTYPE_RGB555 | 每个像素用16位表示,RGB分量都使用5位(剩下的1位不用) |
MEDIASUBTYPE_RGB24 | 每个像素用24位表示,RGB分量各使用8位 |
MEDIASUBTYPE_RGB32 | 每个像素用32位表示,RGB分量各使用8位(剩下的8位不用) |
MEDIASUBTYPE_ARGB32 | 每个像素用32位表示,RGB分量各使用8位(剩下的8位用于表示Alpha通道值) |
MEDIASUBTYPE_YUY2 | YUY2格式,以4:2:2方式打包 |
MEDIASUBTYPE_YUYV | YUYV格式(实际格式与YUY2相同) |
MEDIASUBTYPE_YVYU | YVYU格式,以4:2:2方式打包 |
MEDIASUBTYPE_UYVY | UYVY格式,以4:2:2方式打包 |
MEDIASUBTYPE_AYUV | 带Alpha通道的4:4:4 YUV格式 |
MEDIASUBTYPE_Y41P | Y41P格式,以4:1:1方式打包 |
MEDIASUBTYPE_Y411 | Y411格式(实际格式与Y41P相同) |
MEDIASUBTYPE_Y211 | Y211格式 |
MEDIASUBTYPE_IF09 | IF09格式 |
MEDIASUBTYPE_IYUV | IYUV格式 |
MEDIASUBTYPE_YV12 | YV12格式 |
MEDIASUBTYPE_YVU9 | YVU9格式 |
是不是要晕了?我也是。如此多的格式我们如何才能正确的进行视频显示呢?
其实只关心两点即可:
(1)输入的格式是什么?我的是YUV422格式。
(2)输出的格式是什么?我的是RGB24格式。
ok,数据转换程序如下:
/*
 * Convert one YUV (YCbCr) sample triple to a packed RGB24 value.
 *
 * @y, @u, @v: 0..255 component values (u/v biased by 128).
 * Returns the packed pixel as an int: R in bits 0-7, G in 8-15, B in 16-23.
 *
 * BUG FIX: the original wrote the three bytes through a `uchar *` alias of
 * a local uint and returned that, so the byte order of the result depended
 * on host endianness (and the aliasing write is UB under strict aliasing).
 * Composing the value with shifts is portable and matches what the
 * little-endian original produced.
 */
int convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    int r, g, b;

    /* BT.601-style YCbCr -> RGB coefficients. */
    r = y + (1.370705 * (v-128));
    g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
    b = y + (1.732446 * (u-128));

    /* Clamp each channel to [0, 255]. */
    if(r > 255) r = 255;
    if(g > 255) g = 255;
    if(b > 255) b = 255;
    if(r < 0) r = 0;
    if(g < 0) g = 0;
    if(b < 0) b = 0;

    /* The *220/256 scaling reproduces the original output (slightly
     * darkened — presumably to tame over-bright webcams; TODO confirm). */
    r = r * 220 / 256;
    g = g * 220 / 256;
    b = b * 220 / 256;

    return r | (g << 8) | (b << 16);
}
设备信息查询网友Andrew Huang [email protected]写了一个相当全的程序,本处引用如下:
/*
* Author: Andrew Huang
* query v4l2 device capabilities
* date: 2010/06/16
*/
/* NOTE(review): the original header names were stripped by the blog engine
 * (angle-bracketed text eaten); the list below is a reconstruction. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>
#define PRINT_V4L printf
#define PRINT_IOCTL_CMD(cmd) printf("%s = (0x%x)\n",#cmd,cmd)
#define PRINT_IOCTL_CMD2(cmd,n) printf(" %-20s(%d) = (0x%x)\n",#cmd,n,cmd)
#define PRINT_V4L_MEMBER(m) printf(" %-20s:",#m);
#define PRINT_V4L_STRING(m) printf(" %-20s:\"%s\"\n",#m,m)
#define PRINT_V4L_INT(m) printf(" %-20s:%d\n",#m,m)
#define PRINT_V4L_INTX(m) printf(" %-20s:0x%x\n",#m,m)
/* 显示enum 值的之一*/
#define TEST_V4L_FLAG0(m,f) printf("%s",((m & f)==f)?#f:"")
/* 显示一个组合值的所有内容 */
#define PRINT_V4L_FLAGS(m) printf(" %-20s:0x%x [",#m,m) /* 组合值开始*/
#define TEST_V4L_FLAG(m,f) printf("%s",((m & f)==f)?#f",":"") /* 组合值中间值,可以有多个*/
#define PRINT_V4L_FLAGS2(m) printf("]\n") /* 组合值结束*/
#define PRINT_INT(e) printf("%s=%d\n",#e,e)
#ifdef __ARM_LINUX__
//arm linux下,下面的定义不支持
#define V4L2_CAP_VIDEO_OUTPUT_OVERLAY 0xffff
#define V4L2_CAP_HW_FREQ_SEEK 0xffff
#define V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY 0xffffff
#define V4L2_PIX_FMT_RGB444 0xffff
#define V4L2_PIX_FMT_Y16 0xffff
#define V4L2_PIX_FMT_PAL8 0xffffff
#define V4L2_PIX_FMT_YUV444 0xffff
#define V4L2_PIX_FMT_YUV555 0xffff
#define V4L2_PIX_FMT_YUV565 0xffffff
#define V4L2_PIX_FMT_YUV32 0xffff
#define V4L2_PIX_FMT_SGBRG8 0xffff
#define V4L2_PIX_FMT_SGRBG10 0xffffff
#define V4L2_PIX_FMT_SPCA501 0xffff
#define V4L2_PIX_FMT_SGRBG10DPCM8 0xffff
#define V4L2_PIX_FMT_SBGGR16 0xffffff
#define V4L2_PIX_FMT_SPCA505 0xffff
#define V4L2_PIX_FMT_SPCA508 0xffffff
#define V4L2_PIX_FMT_SPCA561 0xffff
#define V4L2_PIX_FMT_PAC207 0xffff
#define V4L2_FIELD_INTERLACED_TB 0xffffff
#define V4L2_PIX_FMT_YVYU 0xffff
#define V4L2_FIELD_INTERLACED_BT 0xffff
#endif
int v4l2_query_video_cap(int fd)
{
int ret;
struct v4l2_capability cap;
ret = ioctl(fd, VIDIOC_QUERYCAP, &cap);
if(ret < 0)
{
printf("get vidieo capability error,error code: %d \n", errno);
return ret;
}
PRINT_V4L("general info\n");
PRINT_IOCTL_CMD(VIDIOC_QUERYCAP);
PRINT_V4L_STRING(cap.driver);
PRINT_V4L_STRING(cap.card);
PRINT_V4L_STRING(cap.bus_info);
PRINT_V4L_MEMBER(cap.version);printf("%u.%u.%u\n",(cap.version>>16)&0XFF, (cap.version>>8)&0XFF,cap.version&0xFF);
PRINT_V4L_FLAGS(cap.capabilities);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_VIDEO_CAPTURE);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_VIDEO_OUTPUT);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_VIDEO_OVERLAY);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_VBI_CAPTURE);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_VBI_OUTPUT);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_SLICED_VBI_CAPTURE);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_SLICED_VBI_OUTPUT);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_RDS_CAPTURE);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_VIDEO_OUTPUT_OVERLAY);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_HW_FREQ_SEEK);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_TUNER);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_AUDIO);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_RADIO);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_READWRITE);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_ASYNCIO);
TEST_V4L_FLAG(cap.capabilities,V4L2_CAP_STREAMING);
PRINT_V4L_FLAGS2(cap.capabilities);
return 0;
}
static void _show_capture_type(enum v4l2_buf_type type)
{
TEST_V4L_FLAG(type,V4L2_BUF_TYPE_VIDEO_CAPTURE);
TEST_V4L_FLAG(type,V4L2_BUF_TYPE_VIDEO_OUTPUT);
TEST_V4L_FLAG(type,V4L2_BUF_TYPE_VIDEO_OVERLAY);
TEST_V4L_FLAG(type,V4L2_BUF_TYPE_VBI_CAPTURE);
TEST_V4L_FLAG(type,V4L2_BUF_TYPE_VBI_OUTPUT);
TEST_V4L_FLAG(type,V4L2_BUF_TYPE_SLICED_VBI_CAPTURE);
TEST_V4L_FLAG(type,V4L2_BUF_TYPE_SLICED_VBI_OUTPUT);
TEST_V4L_FLAG(type,V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY);
TEST_V4L_FLAG(type,V4L2_BUF_TYPE_PRIVATE);
}
//void show_video_std(v4l2_std_id std)
void show_video_std(v4l2_std_id std)
{
printf("%s:std=%x\n",__FUNCTION__,std);
TEST_V4L_FLAG(std,V4L2_STD_PAL_B);
TEST_V4L_FLAG(std,V4L2_STD_PAL_B1);
TEST_V4L_FLAG(std,V4L2_STD_PAL_G);
TEST_V4L_FLAG(std,V4L2_STD_PAL_H);
TEST_V4L_FLAG(std,V4L2_STD_PAL_I);
TEST_V4L_FLAG(std,V4L2_STD_PAL_D);
TEST_V4L_FLAG(std,V4L2_STD_PAL_D1);
TEST_V4L_FLAG(std,V4L2_STD_PAL_M);
TEST_V4L_FLAG(std,V4L2_STD_PAL_N);
TEST_V4L_FLAG(std,V4L2_STD_PAL_Nc);
TEST_V4L_FLAG(std,V4L2_STD_PAL_60);
TEST_V4L_FLAG(std,V4L2_STD_NTSC_M);
TEST_V4L_FLAG(std,V4L2_STD_NTSC_M_JP);
TEST_V4L_FLAG(std,V4L2_STD_NTSC_443);
TEST_V4L_FLAG(std,V4L2_STD_NTSC_M_KR);
TEST_V4L_FLAG(std,V4L2_STD_SECAM_B);
TEST_V4L_FLAG(std,V4L2_STD_SECAM_D);
TEST_V4L_FLAG(std,V4L2_STD_SECAM_G);
TEST_V4L_FLAG(std,V4L2_STD_SECAM_H);
TEST_V4L_FLAG(std,V4L2_STD_SECAM_K);
TEST_V4L_FLAG(std,V4L2_STD_SECAM_K1);
TEST_V4L_FLAG(std,V4L2_STD_SECAM_L);
TEST_V4L_FLAG(std,V4L2_STD_SECAM_LC);
TEST_V4L_FLAG(std,V4L2_STD_ATSC_8_VSB);
TEST_V4L_FLAG(std,V4L2_STD_ATSC_16_VSB);
}
/*
 * Enumerate all video inputs (VIDIOC_ENUMINPUT) and print each one's
 * index, name, type, audio set, tuner index and supported TV standards.
 * Returns the number of inputs found.
 *
 * BUG FIX: the original loop condition `while(ret=ioctl(...)>=0)` parsed
 * as `ret = (ioctl(...) >= 0)` because `>=` binds tighter than `=`, so
 * `ret` only ever held 0 or 1 and the function returned a meaningless
 * boolean.  The comparison is now done directly and the input count is
 * returned instead.
 */
int v4l2_enum_video_input(int fd)
{
    struct v4l2_input input;
    int count = 0;

    memset(&input, 0, sizeof(input)); /* start enumerating at input.index == 0 */
    PRINT_V4L("inputs\n");
    while (ioctl(fd, VIDIOC_ENUMINPUT, &input) >= 0)
    {
        PRINT_IOCTL_CMD2(VIDIOC_ENUMINPUT,input.index);
        PRINT_V4L_INT(input.index);
        PRINT_V4L_STRING(input.name);
        PRINT_V4L_MEMBER(input.type);
        TEST_V4L_FLAG0(input.type,V4L2_INPUT_TYPE_TUNER);
        TEST_V4L_FLAG0(input.type,V4L2_INPUT_TYPE_CAMERA);
        PRINT_V4L("\n");
        PRINT_V4L_INT(input.audioset);
        PRINT_V4L_INT(input.tuner);
        PRINT_V4L_FLAGS(input.std);
        show_video_std(input.std);
        PRINT_V4L_FLAGS2(input.std);
        input.index++;
        count++;
    }
    return count;
}
/*
 * Print the symbolic name of a V4L2 pixel format (FOURCC code).
 *
 * BUG FIX: FOURCC codes are packed ASCII values, not bit flags; the
 * original `(fmt & f) == f` tests matched every format whose bits happen
 * to be a subset of another's, printing several wrong names.  Compare for
 * equality instead; a matching format prints exactly the same "NAME," text
 * as before.
 */
static void _show_pixel_format(unsigned int pixelformat )
{
    if(pixelformat == 0)
    {
        printf("unknown pixelformat %d\n",pixelformat);
        return ;
    }
/* Print #f followed by a comma iff pixelformat equals f exactly. */
#define SHOW_PIX_FMT(f) do { if (pixelformat == (f)) printf(#f ","); } while (0)
    SHOW_PIX_FMT(V4L2_PIX_FMT_RGB332);
    SHOW_PIX_FMT(V4L2_PIX_FMT_RGB444);
    SHOW_PIX_FMT(V4L2_PIX_FMT_RGB555);
    SHOW_PIX_FMT(V4L2_PIX_FMT_RGB565);
    SHOW_PIX_FMT(V4L2_PIX_FMT_RGB555X);
    SHOW_PIX_FMT(V4L2_PIX_FMT_RGB565X);
    SHOW_PIX_FMT(V4L2_PIX_FMT_BGR24);
    SHOW_PIX_FMT(V4L2_PIX_FMT_RGB24);
    SHOW_PIX_FMT(V4L2_PIX_FMT_BGR32);
    SHOW_PIX_FMT(V4L2_PIX_FMT_RGB32);
    SHOW_PIX_FMT(V4L2_PIX_FMT_GREY);
    SHOW_PIX_FMT(V4L2_PIX_FMT_Y16);
    SHOW_PIX_FMT(V4L2_PIX_FMT_PAL8);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YVU410);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YVU420);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YUYV);
    SHOW_PIX_FMT(V4L2_PIX_FMT_UYVY);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YUV422P);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YUV411P);
    SHOW_PIX_FMT(V4L2_PIX_FMT_Y41P);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YUV444);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YUV555);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YUV565);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YUV32);
    SHOW_PIX_FMT(V4L2_PIX_FMT_NV12);
    SHOW_PIX_FMT(V4L2_PIX_FMT_NV21);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YUV410);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YUV420);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YYUV);
    SHOW_PIX_FMT(V4L2_PIX_FMT_HI240);
    SHOW_PIX_FMT(V4L2_PIX_FMT_HM12);
    SHOW_PIX_FMT(V4L2_PIX_FMT_SBGGR8);
    SHOW_PIX_FMT(V4L2_PIX_FMT_SGBRG8);
    SHOW_PIX_FMT(V4L2_PIX_FMT_SGRBG10);
    SHOW_PIX_FMT(V4L2_PIX_FMT_SGRBG10DPCM8);
    SHOW_PIX_FMT(V4L2_PIX_FMT_SBGGR16);
    SHOW_PIX_FMT(V4L2_PIX_FMT_MJPEG);
    SHOW_PIX_FMT(V4L2_PIX_FMT_JPEG);
    SHOW_PIX_FMT(V4L2_PIX_FMT_DV);
    SHOW_PIX_FMT(V4L2_PIX_FMT_MPEG);
    SHOW_PIX_FMT(V4L2_PIX_FMT_WNVA);
    SHOW_PIX_FMT(V4L2_PIX_FMT_SN9C10X);
    SHOW_PIX_FMT(V4L2_PIX_FMT_PWC1);
    SHOW_PIX_FMT(V4L2_PIX_FMT_PWC2);
    SHOW_PIX_FMT(V4L2_PIX_FMT_ET61X251);
    SHOW_PIX_FMT(V4L2_PIX_FMT_SPCA501);
    SHOW_PIX_FMT(V4L2_PIX_FMT_SPCA505);
    SHOW_PIX_FMT(V4L2_PIX_FMT_SPCA508);
    SHOW_PIX_FMT(V4L2_PIX_FMT_SPCA561);
    SHOW_PIX_FMT(V4L2_PIX_FMT_PAC207);
    SHOW_PIX_FMT(V4L2_PIX_FMT_YVYU);
#undef SHOW_PIX_FMT
}
/*
 * Enumerate and print every pixel format the driver offers for the given
 * buffer type (VIDIOC_ENUM_FMT).  `title` is printed as a heading.
 * Returns the number of formats found.
 */
int v4l2_enum_video_format(int fd,enum v4l2_buf_type type,char * title)
{
    struct v4l2_fmtdesc fmtdesc;
    int n = 0;

    memset(&fmtdesc, 0, sizeof(fmtdesc));
    PRINT_V4L("%s\n", title);

    /* Walk the format list; the driver returns -1 (EINVAL) past the end. */
    while (1)
    {
        fmtdesc.index = n;
        fmtdesc.type = type;
        if (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == -1)
            break;
        PRINT_IOCTL_CMD(VIDIOC_ENUM_FMT);
        PRINT_V4L_INT(fmtdesc.index);
        PRINT_V4L_FLAGS(fmtdesc.type);
        _show_capture_type(fmtdesc.type);
        PRINT_V4L_FLAGS2(fmtdesc.type);
        PRINT_V4L_INT(fmtdesc.flags);
        PRINT_V4L_STRING(fmtdesc.description);
        PRINT_V4L_FLAGS(fmtdesc.pixelformat);
        _show_pixel_format(fmtdesc.pixelformat);
        PRINT_V4L_FLAGS2(fmtdesc.pixelformat);
        n++;
    }
    return n;
}
/* Convenience wrappers: enumerate the formats of one buffer type, using the
 * stringized type name as the printed title. */
#define v4l2_enum_foramt(fd,type) v4l2_enum_video_format(fd,type,#type)
#define v4l2_enum_video_capture(fd) v4l2_enum_foramt(fd,V4L2_BUF_TYPE_VIDEO_CAPTURE)
#define v4l2_enum_video_overlay(fd) v4l2_enum_foramt(fd,V4L2_BUF_TYPE_VIDEO_OVERLAY)
/* BUG FIX: this previously passed V4L2_CAP_VBI_CAPTURE (a capability flag),
 * but VIDIOC_ENUM_FMT expects a v4l2_buf_type value. */
#define v4l2_enum_video_vbi_capture(fd) v4l2_enum_foramt(fd,V4L2_BUF_TYPE_VBI_CAPTURE)
/*
 * Print the name of a v4l2_field value (progressive vs interlaced layout).
 *
 * BUG FIX: v4l2_field is a sequential enum, not a bit mask, so the original
 * TEST_V4L_FLAG0 tests mis-matched overlapping values — in particular
 * V4L2_FIELD_ANY (value 0) was printed for EVERY input because
 * (m & 0) == 0 is always true.  Compare for equality instead.
 */
void _show_v4l2_field(unsigned v4l2_filed)
{
    PRINT_V4L_MEMBER(v4l2_filed);
    switch (v4l2_filed) {
    case V4L2_FIELD_ANY: printf("V4L2_FIELD_ANY"); break;
    case V4L2_FIELD_NONE: printf("V4L2_FIELD_NONE"); break;
    case V4L2_FIELD_TOP: printf("V4L2_FIELD_TOP"); break;
    case V4L2_FIELD_BOTTOM: printf("V4L2_FIELD_BOTTOM"); break;
    case V4L2_FIELD_INTERLACED: printf("V4L2_FIELD_INTERLACED"); break;
    case V4L2_FIELD_SEQ_TB: printf("V4L2_FIELD_SEQ_TB"); break;
    case V4L2_FIELD_SEQ_BT: printf("V4L2_FIELD_SEQ_BT"); break;
    case V4L2_FIELD_ALTERNATE: printf("V4L2_FIELD_ALTERNATE"); break;
    case V4L2_FIELD_INTERLACED_TB: printf("V4L2_FIELD_INTERLACED_TB"); break;
    case V4L2_FIELD_INTERLACED_BT: printf("V4L2_FIELD_INTERLACED_BT"); break;
    default: printf("unknown v4l2_field %u", v4l2_filed); break;
    }
    PRINT_V4L("\n");
}
/*
 * Fetch and print the current capture format (VIDIOC_G_FMT): buffer type,
 * width/height, pixel format, bytes per line, image size and field order.
 * Returns 0 on success, -1 (after perror) on ioctl failure.
 */
int v4l2_get_capture_format(int fd)
{
struct v4l2_format format;
memset(&format,0,sizeof(format));
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl(fd,VIDIOC_G_FMT,&format))
{
perror("VIDIOC_G_FMT(VIDEO_CAPTURE)");
return -1;
}
PRINT_IOCTL_CMD(VIDIOC_G_FMT);
PRINT_V4L_FLAGS(format.type);
_show_capture_type(format.type);
PRINT_V4L_FLAGS2(format.type);
PRINT_V4L_INT(format.fmt.pix.width);
PRINT_V4L_INT(format.fmt.pix.height);
_show_pixel_format(format.fmt.pix.pixelformat);
PRINT_V4L_INT(format.fmt.pix.bytesperline);
PRINT_V4L_INT(format.fmt.pix.sizeimage);
_show_v4l2_field(format.fmt.pix.field);
return 0;
}
/*
 * Query-tool entry point: open the V4L2 device named by argv[1] (default
 * /dev/video0) and dump its capabilities, inputs, supported capture
 * formats and current capture format.
 */
int main(int argc, char *argv[])
{
    char dev_name[64] = "/dev/video0";

    if (argc > 1) {
        strncpy(dev_name, argv[1], sizeof(dev_name) - 1);
    }

    printf("open device %s\n", dev_name);
    int cam_fd = open(dev_name, O_RDWR | O_NONBLOCK);
    if (cam_fd == -1) {
        printf("open failure \n");
        return -1;
    }

    printf("## v4l2 device info [%s] ##\n", dev_name);
    v4l2_query_video_cap(cam_fd);
    v4l2_enum_video_input(cam_fd);
    v4l2_enum_video_capture(cam_fd);
    v4l2_get_capture_format(cam_fd);

    close(cam_fd);
    return 0;
}
采集到的图像数据要保存为jpeg图像,首先必须移植jpeg库。然后在程序中调用jpeg压缩接口。
本处直接上我写好的程序,本程序连续采集4张图片后退出:
//由于程序中调用了libjpeg库,所以,程序编译时需要加上-ljpeg.
//ubuntu 下安装jpeg库命令为:apt-get install libjpeg62
//arm上需要自己编译、移植jpeg库
/* NOTE(review): the original header names were stripped by the blog engine;
 * reconstructed below (libjpeg's jpeglib.h is required for jpeg_compress). */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <getopt.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h>
#include <linux/videodev2.h>
#include <time.h>
#include <jpeglib.h>
/* Zero out an object (used to initialise the V4L2 request structures). */
#define CLEAR(x) memset (&(x), 0, sizeof (x))
/* Shorthand types used by the YUV->RGB conversion helpers. */
#define uchar unsigned char
#define uint unsigned int
/* One mmap'ed frame buffer: its userspace address and byte length. */
struct buffer {
void * start;
size_t length;
};
//static char * dev_name = "/dev/video0";// camera device name (now taken from argv in main)
static int fd = -1; // camera file descriptor, opened in main()
struct buffer * buffers = NULL; // array of mmap'ed frame buffers
static unsigned int n_buffers = 0; // number of entries in buffers[]
FILE *file_fd; // NOTE(review): never used in the visible code
static unsigned long file_length; // bytes per frame, computed in main()
static unsigned char *file_name; // NOTE(review): never used in the visible code
/*
 * JPEG-compress one RGB24 frame to a sequentially numbered file
 * ("out0.jpg", "out1.jpg", ...), quality 80.
 *
 * @rgb:    start of the RGB24 pixel data (3 bytes per pixel)
 * @width:  frame width in pixels
 * @height: frame height in pixels
 *
 * Returns -1 if the output file cannot be opened, 1 once more than four
 * frames have been written (the caller uses this as a stop signal),
 * 0 otherwise.
 */
int jpeg_compress(unsigned char *rgb,int width,int height)
{
    struct jpeg_compress_struct cinfo;
    struct jpeg_error_mgr jerr;
    FILE * outfile;
    JSAMPROW row_pointer[1];
    int row_stride;
    static int count = 0;
    char name[32]; /* BUG FIX: 19 bytes only barely fit "out%d.jpg" for any int */

    cinfo.err = jpeg_std_error(&jerr);
    jpeg_create_compress(&cinfo);

    /* BUG FIX: bounded snprintf instead of sprintf. */
    snprintf(name, sizeof name, "out%d.jpg", count++);
    if ((outfile = fopen(name, "wb")) == NULL)
    {
        /* BUG FIX: report the actual file name (message was hard-coded to
         * "out.jpg") and release the compress object (was leaked here). */
        printf("can not open %s\n", name);
        jpeg_destroy_compress(&cinfo);
        return -1;
    }
    jpeg_stdio_dest(&cinfo, outfile);
    cinfo.image_width = width;
    cinfo.image_height = height;
    cinfo.input_components = 3; /* RGB: 3 components per pixel */
    cinfo.in_color_space = JCS_RGB; /* input data is RGB */
    jpeg_set_defaults(&cinfo);
    jpeg_set_quality(&cinfo, 80, TRUE ); /* compression quality 80 */
    jpeg_start_compress(&cinfo, TRUE);
    row_stride = width * 3;
    while (cinfo.next_scanline < cinfo.image_height)
    {
        row_pointer[0] = &rgb[cinfo.next_scanline * row_stride];
        (void) jpeg_write_scanlines(&cinfo, row_pointer, 1);
    }
    jpeg_finish_compress(&cinfo);
    fclose(outfile);
    jpeg_destroy_compress(&cinfo);
    if(count > 4) /* stop signal after capturing the configured frame count */
        return 1;
    return 0;
}
/*
 * Convert one YUV (YCbCr) sample triple to a packed RGB24 value.
 *
 * @y, @u, @v: 0..255 component values (u/v biased by 128).
 * Returns the packed pixel as an int: R in bits 0-7, G in 8-15, B in 16-23.
 *
 * BUG FIX: the original wrote the three bytes through a `uchar *` alias of
 * a local uint and returned that, so the byte order of the result depended
 * on host endianness (and the aliasing write is UB under strict aliasing).
 * Composing the value with shifts is portable and matches what the
 * little-endian original produced.
 */
int convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    int r, g, b;

    /* BT.601-style YCbCr -> RGB coefficients. */
    r = y + (1.370705 * (v-128));
    g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
    b = y + (1.732446 * (u-128));

    /* Clamp each channel to [0, 255]. */
    if(r > 255) r = 255;
    if(g > 255) g = 255;
    if(b > 255) b = 255;
    if(r < 0) r = 0;
    if(g < 0) g = 0;
    if(b < 0) b = 0;

    /* The *220/256 scaling reproduces the original output (slightly
     * darkened — presumably to tame over-bright webcams; TODO confirm). */
    r = r * 220 / 256;
    g = g * 220 / 256;
    b = b * 220 / 256;

    return r | (g << 8) | (b << 16);
}
/*
 * Expand a packed YUYV 4:2:2 buffer into RGB24.
 * Every 4 input bytes (Y0 U Y1 V) describe two pixels that share one U/V
 * pair; each pixel becomes 3 output bytes (R, G, B).
 * `rgb` must hold width*height*3 bytes.  Always returns 0.
 */
int convert_yuv_to_rgb_buffer(uchar *yuv, uchar *rgb, uint width,uint height)
{
    uint src;
    uint dst = 0;

    /* YUYV holds 2 bytes per pixel, hence the width*height*2 bound. */
    for (src = 0; src < width * height * 2; src += 4) {
        int y0 = yuv[src + 0];
        int u  = yuv[src + 1];
        int y1 = yuv[src + 2];
        int v  = yuv[src + 3];
        uint px;

        /* First pixel of the macro-pixel. */
        px = convert_yuv_to_rgb_pixel(y0, u, v);
        rgb[dst++] = px & 0xff;
        rgb[dst++] = (px >> 8) & 0xff;
        rgb[dst++] = (px >> 16) & 0xff;

        /* Second pixel shares the same U/V pair. */
        px = convert_yuv_to_rgb_pixel(y1, u, v);
        rgb[dst++] = px & 0xff;
        rgb[dst++] = (px >> 8) & 0xff;
        rgb[dst++] = (px >> 16) & 0xff;
    }
    return 0;
}
//////////////////////////////////////////////////////
// Grab one frame: dequeue a filled buffer, convert it from YUYV to RGB24,
// JPEG-compress it to outN.jpg, then requeue the buffer.
// Returns non-zero once enough frames have been taken (caller then stops).
//////////////////////////////////////////////////////
static int read_frame (void)
{
struct v4l2_buffer buf;
unsigned int i; // NOTE(review): unused
static int count = 0;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
ioctl (fd, VIDIOC_DQBUF, &buf); // dequeue a filled frame buffer
                                // NOTE(review): return value is not checked
assert (buf.index < n_buffers);
printf ("buf.index dq is %d,\n",buf.index);
printf ("buffers[0].length is %d,\n",buffers[buf.index].length);
unsigned char *rgbbuf = malloc(320*240*3);// RGB24 output, 3 bytes per pixel
// NOTE(review): the size is hard-coded to 320x240 and must match the
// format negotiated in main(); confirm if the resolution ever changes.
if( rgbbuf == NULL )
{
printf("malloc faile!\n");
exit(1);
}
convert_yuv_to_rgb_buffer(buffers[buf.index].start,rgbbuf,320,240);
// JPEG-compress the RGB frame to a numbered output file
if(jpeg_compress(rgbbuf,320,240)<0)
{
printf("compress failed!\n");
exit(2);
}
free(rgbbuf);
rgbbuf = NULL;
ioctl (fd, VIDIOC_QBUF, &buf); // requeue the buffer for further capture
if(count++>4)
return 1;
return 0;
}
/*
 * Capture entry point: open the camera (argv[1] or /dev/video0), configure
 * 320x240 YUYV capture, mmap 4 kernel frame buffers, start streaming, and
 * grab frames via select()/read_frame() until enough JPEGs are written.
 */
int main (int argc,char ** argv)
{
struct v4l2_capability cap;
struct v4l2_format fmt;
unsigned int i;
enum v4l2_buf_type type;
char dev_name[64] = "/dev/video0";
if(argc>1)
{
strncpy(dev_name,argv[1],sizeof(dev_name)-1);
}
fd = open (dev_name, O_RDWR /* required */ | O_NONBLOCK, 0);// open the device
// NOTE(review): open() and the QUERYCAP ioctl results are not checked
ioctl (fd, VIDIOC_QUERYCAP, &cap);// query device capabilities
CLEAR (fmt);
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = 320;
fmt.fmt.pix.height = 240;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
if(ioctl (fd, VIDIOC_S_FMT, &fmt) < 0) // set the capture format
{
printf("----------------------\nVIDIOC_S_FMT failed!--------------------\n");
}
file_length = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; // bytes per frame
struct v4l2_requestbuffers req;
CLEAR (req);
req.count = 4;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
ioctl (fd, VIDIOC_REQBUFS, &req); // ask the driver for req.count mmap buffers
if (req.count < 2)
printf("Insufficient buffer memory\n");
buffers = calloc (req.count, sizeof (*buffers));// userspace bookkeeping array
for (n_buffers = 0; n_buffers < req.count; ++n_buffers)
{
struct v4l2_buffer buf; // one driver-side frame buffer
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if (-1 == ioctl (fd, VIDIOC_QUERYBUF, &buf)) // get the buffer's offset/length
printf ("VIDIOC_QUERYBUF error\n");
buffers[n_buffers].length = buf.length;
buffers[n_buffers].start =
mmap (NULL /* start anywhere */, // map the kernel buffer into userspace
buf.length,
PROT_READ | PROT_WRITE /* required */,
MAP_SHARED /* recommended */,
fd, buf.m.offset);
if (MAP_FAILED == buffers[n_buffers].start)
printf ("mmap failed\n");
}
for (i = 0; i < n_buffers; ++i)
{
struct v4l2_buffer buf;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (-1 == ioctl (fd, VIDIOC_QBUF, &buf))// queue every buffer for capture
printf ("VIDIOC_QBUF failed\n");
}
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl (fd, VIDIOC_STREAMON, &type)) // start streaming
printf ("VIDIOC_STREAMON failed\n");
for (;;) // capture loop: wait until a frame is ready, then process it
{
fd_set fds;
struct timeval tv;
int r;
FD_ZERO (&fds);// clear the descriptor set
FD_SET (fd, &fds);// watch the camera descriptor
/* Timeout. */
tv.tv_sec = 2;
tv.tv_usec = 0;
r = select (fd + 1, &fds, NULL, NULL, &tv);// block until readable (2 s timeout)
if (-1 == r) {
if (EINTR == errno)
continue;
printf ("select err\n");
}
if (0 == r) {
fprintf (stderr, "select timeout\n");
exit (EXIT_FAILURE);
}
if (read_frame ())// a frame is ready; break once read_frame signals done
break;
}
ioctl (fd, VIDIOC_STREAMOFF, &type);// stop streaming before unmapping
unmap: // NOTE(review): label is never targeted by any goto
for (i = 0; i < n_buffers; ++i)
if (-1 == munmap (buffers[i].start, buffers[i].length))
printf ("munmap error");
close (fd);
exit (EXIT_SUCCESS);
return 0;
}