Compile command:
$ gcc -std=c99 cameraDemo.c
I tested with a UVC camera, so the resolution and data format are hard-coded in this demo; adjust them to whatever parameters your own camera reports.
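The program opens /dev/video0, requests 3 mmap buffers, and captures 5 frames; MJPEG/H264 frames are saved as ./source_dataN.MJPEG (or .H264) in the current directory, while raw YUV frames are only reported on the console. Depending on your toolchain you may need -std=gnu99 (or a feature-test macro such as _DEFAULT_SOURCE) so that the POSIX declarations for open/mmap/select are visible.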
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <sys/time.h>
#include <linux/videodev2.h>
#define camera_print(...) (printf("L%d(%s):", __LINE__, __FILE__), \
printf(__VA_ARGS__))
#define camera_dbg(...) (printf("L%d(%s):", __LINE__, __FILE__), \
printf(__VA_ARGS__))
#define camera_err(...) (printf("L%d(%s):", __LINE__, __FILE__), \
printf(__VA_ARGS__))
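/* round x up to the next multiple of 16; used below when the driver hands back a frame larger than width*height suggests (i.e. it aligned the resolution) */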
#define ALIGN_16B(x) (((x) + (15)) & ~(15))
#define N_WIN_SIZES 20
#define NUM_SUPPORT_FMT 30
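/* one mmap()ed driver buffer: start address and length of each plane (up to 3 planes) */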
struct buffer
{
void *start[3];
size_t length[3];
};
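/* per-device state: fd, capture type (single- vs multi-planar), negotiated format/resolution and the mapped buffers */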
typedef struct camera_hal
{
int videofd;
int driver_type;
unsigned int win_width;
unsigned int win_height;
int photo_num;
unsigned int pixelformat;
int nplanes;
int buf_count;
struct buffer *buffers;
}camera_handle;
struct v4l2_frmsize
{
unsigned int width;
unsigned int height;
};
struct vfe_format
{
unsigned char name[32];
unsigned int fourcc;
struct v4l2_frmsize size[N_WIN_SIZES];
};
static struct vfe_format support_fmt[NUM_SUPPORT_FMT];
static struct vfe_format sensor_formats[NUM_SUPPORT_FMT];
char *get_format_name(unsigned int format)
{
if(format == V4L2_PIX_FMT_YUV422P)
return "YUV422P";
else if(format == V4L2_PIX_FMT_YUV420)
return "YUV420";
else if(format == V4L2_PIX_FMT_YVU420)
return "YVU420";
else if(format == V4L2_PIX_FMT_NV16)
return "NV16";
else if(format == V4L2_PIX_FMT_NV12)
return "NV12";
else if(format == V4L2_PIX_FMT_NV61)
return "NV61";
else if(format == V4L2_PIX_FMT_NV21)
return "NV21";
else if(format == V4L2_PIX_FMT_HM12)
return "MB YUV420";
else if(format == V4L2_PIX_FMT_YUYV)
return "YUYV";
else if(format == V4L2_PIX_FMT_YVYU)
return "YVYU";
else if(format == V4L2_PIX_FMT_UYVY)
return "UYVY";
else if(format == V4L2_PIX_FMT_VYUY)
return "VYUY";
else if(format == V4L2_PIX_FMT_MJPEG)
return "MJPEG";
else if(format == V4L2_PIX_FMT_H264)
return "H264";
else
return NULL;
}
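/* write one captured frame to its own file; returns 0 on success, -1 on failure */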
int save_frame_to_file(void *str,void *start,int length)
{
FILE *fp = NULL;
fp = fopen(str, "wb+"); /* each captured frame is written to its own file */
if(!fp)
{
printf(" Open %s error\n", (char *)str);
return -1;
}
if(fwrite(start, length, 1, fp))
{
fclose(fp);
return 0;
}
else
{
printf(" Write file fail (%s)\n",strerror(errno));
fclose(fp);
return -1;
}
return 0;
}
int main(int argc, char *argv[])
{
struct v4l2_capability cap; /* Query device capabilities */
struct v4l2_fmtdesc fmtdesc; /* Enumerate image formats */
struct v4l2_frmsizeenum frmsize; /* Enumerate frame sizes */
struct v4l2_format fmt; /* try a format */
struct v4l2_input inp; /* select the current video input */
struct v4l2_streamparm parms; /* set streaming parameters */
struct v4l2_requestbuffers req; /* Initiate Memory Mapping or User Pointer I/O */
struct v4l2_buffer buf;
camera_handle camera;
int index;
memset(&camera, 0, sizeof(camera_handle));
camera_dbg("camera test!\n");
/* 1.open /dev/videoX node */
camera.videofd = open("/dev/video0", O_RDWR, 0);
if(camera.videofd < 0){
camera_dbg(" open %s fail!!!\n", "/dev/video0");
return -1;
}
camera_dbg("open successful!\n");
/* 2.Query device capabilities */
memset(&cap, 0, sizeof(cap));
if(ioctl(camera.videofd,VIDIOC_QUERYCAP,&cap) < 0){
camera_err(" Query device capabilities fail!!!\n");
}else{
camera_dbg(" Querey device capabilities succeed\n");
camera_dbg(" cap.driver=%s\n",cap.driver);
camera_dbg(" cap.card=%s\n",cap.card);
camera_dbg(" cap.bus_info=%s\n",cap.bus_info);
camera_dbg(" cap.version=0x%08x\n",cap.version);
camera_dbg(" cap.capabilities=0x%08x\n",cap.capabilities);
}
if((cap.capabilities & (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE)) == 0){
camera_err(" The device does not support the Video Capture interface!!!\n");
close(camera.videofd);
return -1;
}
if(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE){
camera_dbg("V4L2_CAP_VIDEO_CAPTURE_MPLANE\n");
camera.driver_type = V4L2_CAP_VIDEO_CAPTURE_MPLANE;
}else if(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE){
camera_dbg("V4L2_CAP_VIDEO_CAPTURE\n");
camera.driver_type = V4L2_CAP_VIDEO_CAPTURE;
}else{
camera_err(" %s is not a capture device.\n","/dev/video0");
close(camera.videofd);
return -1;
}
/* 3.select the current video input */
inp.index = 0;
/* VIDIOC_S_INPUT takes just the input index; a UVC camera normally exposes a single input 0 */
if(ioctl(camera.videofd,VIDIOC_S_INPUT,&inp.index) < 0){
camera_err(" VIDIOC_S_INPUT failed! s_input: %d\n",inp.index);
close(camera.videofd);
return -1;
}
/* 4.Enumerate image formats */
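/* VIDIOC_ENUM_FMT walks the driver's pixel-format list; only formats this demo knows by name are recorded in support_fmt[] */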
camera_dbg("**********************************************************\n");
camera_dbg(" enumerate image formats\n");
memset(support_fmt, 0, sizeof(support_fmt));
memset(&fmtdesc, 0, sizeof(fmtdesc));
fmtdesc.index = 0;
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
index = 0;
while(ioctl(camera.videofd,VIDIOC_ENUM_FMT,&fmtdesc) == 0)
{
/* save image formats */
if(get_format_name(fmtdesc.pixelformat) != NULL){
memcpy(support_fmt[index].name, fmtdesc.description, sizeof(fmtdesc.description));
support_fmt[index].fourcc = fmtdesc.pixelformat;
camera_dbg(" format index = %d, name = %s\n", index, get_format_name(fmtdesc.pixelformat));
index++;
}
fmtdesc.index++;
}
/* 5.try every enumerated format; the exact resolution is not important here */
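/* VIDIOC_TRY_FMT only validates/adjusts the request without changing the driver state, so every candidate format can be probed safely */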
int enumFmtIndex = 0;
int formatNum = 0;
camera.win_width = 1280;
camera.win_height = 720;
memset(&fmt,0,sizeof(fmt));
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE){
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.width = camera.win_width;
fmt.fmt.pix_mp.height = camera.win_height;
fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
}else{
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = camera.win_width;
fmt.fmt.pix.height = camera.win_height;
fmt.fmt.pix.field = V4L2_FIELD_NONE;
}
memset(sensor_formats,0,sizeof(sensor_formats));
camera_dbg("*********************************************************\n");
camera_dbg(" The sensor supports the following formats :\n");
while(support_fmt[enumFmtIndex].fourcc != 0)
{
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
fmt.fmt.pix_mp.pixelformat = support_fmt[enumFmtIndex].fourcc;
else
fmt.fmt.pix.pixelformat = support_fmt[enumFmtIndex].fourcc;
if(ioctl(camera.videofd,VIDIOC_TRY_FMT,&fmt) == 0){
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
sensor_formats[formatNum].fourcc = fmt.fmt.pix_mp.pixelformat;
else
sensor_formats[formatNum].fourcc = fmt.fmt.pix.pixelformat;
camera_dbg(" Index %d : %s.\n",formatNum,get_format_name(sensor_formats[formatNum].fourcc));
formatNum++;
}
enumFmtIndex++;
}
/* 6.Enumerate frame sizes */
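/* VIDIOC_ENUM_FRAMESIZES reports either discrete sizes or a stepwise/continuous range; discrete sizes are stored as-is, for a continuous range only the maximum size is kept */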
enumFmtIndex = 0;
while(sensor_formats[enumFmtIndex].fourcc != 0)
{
memset(&frmsize,0,sizeof(frmsize));
frmsize.index = 0;
frmsize.pixel_format = sensor_formats[enumFmtIndex].fourcc;
camera_dbg("**********************************************************\n");
camera_dbg(" The %s supports the following resolutions:\n",get_format_name(sensor_formats[enumFmtIndex].fourcc));
while(ioctl(camera.videofd,VIDIOC_ENUM_FRAMESIZES,&frmsize) == 0){
if(frmsize.type == V4L2_FRMSIZE_TYPE_CONTINUOUS){
sensor_formats[enumFmtIndex].size[frmsize.index].width = frmsize.stepwise.max_width;
sensor_formats[enumFmtIndex].size[frmsize.index].height = frmsize.stepwise.max_height;
camera_dbg(" Index %d : %u * %u\n",frmsize.index,frmsize.stepwise.max_width,frmsize.stepwise.max_height);
}else{
sensor_formats[enumFmtIndex].size[frmsize.index].width = frmsize.discrete.width;
sensor_formats[enumFmtIndex].size[frmsize.index].height = frmsize.discrete.height;
camera_dbg(" Index %d : %u * %u\n",frmsize.index,frmsize.discrete.width,frmsize.discrete.height);
}
frmsize.index++;
if(frmsize.index > N_WIN_SIZES-1){
camera_err(" Beyond the maximum queryable range, please modify N_WIN_SIZES.\n");
break;
}
}
/* next format */
enumFmtIndex++;
}
int i;
camera.pixelformat = V4L2_PIX_FMT_MJPEG;
camera_print("**********************************************************\n");
camera_print(" Using format parameters %s.\n", get_format_name(camera.pixelformat));
for(i = 0; sensor_formats[i].fourcc != 0; i++)
if(camera.pixelformat == sensor_formats[i].fourcc)
break;
/* find support pixelformat */
if(sensor_formats[i].fourcc == 0){
camera_err(" sensor not support %s\n", get_format_name(camera.pixelformat));
camera_print(" Use support for the first format and resolution\n");
camera.pixelformat = sensor_formats[0].fourcc;
camera.win_width = sensor_formats[0].size[0].width;
camera.win_height = sensor_formats[0].size[0].height;
}else{
int j = i, sizeindex; /* i already indexes the matching format */
for(sizeindex=0;sensor_formats[j].size[sizeindex].width!=0; sizeindex++)
if(camera.win_width == sensor_formats[j].size[sizeindex].width && camera.win_height == sensor_formats[j].size[sizeindex].height)
break;
if(sensor_formats[j].size[sizeindex].width == 0){
camera_err(" sensor does not support %u * %u\n", camera.win_width, camera.win_height);
camera_print(" falling back to the first supported resolution\n");
if(sensor_formats[j].size[0].width != 0 && sensor_formats[j].size[0].height != 0){
camera.win_width = sensor_formats[j].size[0].width;
camera.win_height = sensor_formats[j].size[0].height;
}else{
/* mandatory settings */
camera.win_width = 640;
camera.win_height = 480;
}
}
}
camera_print(" camera pixelformat: %s\n",get_format_name(camera.pixelformat));
camera_print(" Resolution size : %u * %u\n",camera.win_width,camera.win_height);
camera.photo_num = 5;
camera_print(" The number of photos taken is %d.\n",camera.photo_num);
/* 7.set streaming parameters */
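/* request roughly 30 fps (1 frame per 1/30 s); the driver may adjust this, which is why step 8 reads the parameters back */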
memset(&parms, 0, sizeof(struct v4l2_streamparm));
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
parms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
parms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
parms.parm.capture.timeperframe.numerator = 1;
parms.parm.capture.timeperframe.denominator = 30;
if(ioctl(camera.videofd,VIDIOC_S_PARM,&parms) < 0){
camera_err(" Setting streaming parameters failed, numerator:%d denominator:%d\n",
parms.parm.capture.timeperframe.numerator,
parms.parm.capture.timeperframe.denominator);
close(camera.videofd);
return -1;
}
/* 8.get streaming parameters */
memset(&parms,0,sizeof(struct v4l2_streamparm));
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
parms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
parms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl(camera.videofd,VIDIOC_G_PARM,&parms) < 0){
camera_err(" Get streaming parameters failed!\n");
}else{
camera_print(" Camera capture framerate is %u/%u\n",
parms.parm.capture.timeperframe.denominator, \
parms.parm.capture.timeperframe.numerator);
}
/* 9.set the data format */
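/* VIDIOC_S_FMT may adjust width/height to the nearest supported size, so the values the driver returns are copied back into camera.win_width/win_height */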
memset(&fmt, 0, sizeof(struct v4l2_format));
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE){
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.width = camera.win_width;
fmt.fmt.pix_mp.height = camera.win_height;
fmt.fmt.pix_mp.pixelformat = camera.pixelformat;
fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
}else{
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = camera.win_width;
fmt.fmt.pix.height = camera.win_height;
fmt.fmt.pix.pixelformat = camera.pixelformat;
fmt.fmt.pix.field = V4L2_FIELD_NONE;
}
if (ioctl(camera.videofd, VIDIOC_S_FMT, &fmt) < 0){
camera_err(" setting the data format failed!\n");
close(camera.videofd);
return -1;
}
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE){
if(camera.win_width != fmt.fmt.pix_mp.width || camera.win_height != fmt.fmt.pix_mp.height)
camera_err(" does not support %u * %u\n", camera.win_width, camera.win_height);
camera.win_width = fmt.fmt.pix_mp.width;
camera.win_height = fmt.fmt.pix_mp.height;
camera_print(" VIDIOC_S_FMT succeed\n");
camera_print(" fmt.type = %d\n",fmt.type);
camera_print(" fmt.fmt.pix.width = %d\n",fmt.fmt.pix_mp.width);
camera_print(" fmt.fmt.pix.height = %d\n",fmt.fmt.pix_mp.height);
camera_print(" fmt.fmt.pix.pixelformat = %s\n",get_format_name(fmt.fmt.pix_mp.pixelformat));
camera_print(" fmt.fmt.pix.field = %d\n",fmt.fmt.pix_mp.field);
if (ioctl(camera.videofd, VIDIOC_G_FMT, &fmt) < 0)
camera_err(" get the data format failed!\n");
camera.nplanes = fmt.fmt.pix_mp.num_planes;
}else{
if(camera.win_width != fmt.fmt.pix.width || camera.win_height != fmt.fmt.pix.height)
camera_err(" does not support %u * %u\n", camera.win_width, camera.win_height);
camera.win_width = fmt.fmt.pix.width;
camera.win_height = fmt.fmt.pix.height;
camera_print(" VIDIOC_S_FMT succeed\n");
camera_print(" fmt.type = %d\n",fmt.type);
camera_print(" fmt.fmt.pix.width = %d\n",fmt.fmt.pix.width);
camera_print(" fmt.fmt.pix.height = %d\n",fmt.fmt.pix.height);
camera_print(" fmt.fmt.pix.pixelformat = %s\n",get_format_name(fmt.fmt.pix.pixelformat));
camera_print(" fmt.fmt.pix.field = %d\n",fmt.fmt.pix.field);
}
/* 10.Initiate Memory Mapping or User Pointer I/O */
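/* ask for 3 mmap buffers; the driver may grant a different number, which it returns in req.count */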
memset(&req, 0, sizeof(struct v4l2_requestbuffers));
req.count = 3;
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if(ioctl(camera.videofd, VIDIOC_REQBUFS, &req) < 0){
camera_err(" VIDIOC_REQBUFS failed\n");
close(camera.videofd);
return -1;
}
/* Query the status of a buffers */
camera.buf_count = req.count;
camera_dbg(" reqbuf number is %d\n",camera.buf_count);
camera.buffers = calloc(req.count, sizeof(struct buffer));
int n_buffers;
for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
memset(&buf, 0, sizeof(struct v4l2_buffer));
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE){
buf.length = camera.nplanes;
buf.m.planes = (struct v4l2_plane *)calloc(buf.length, sizeof(struct v4l2_plane));
}
if (ioctl(camera.videofd, VIDIOC_QUERYBUF, &buf) == -1) {
camera_err(" VIDIOC_QUERYBUF error\n");
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
free(camera.buffers);
close(camera.videofd);
return -1;
}
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE){
for(int i = 0; i < camera.nplanes; i++){
camera.buffers[n_buffers].length[i] = buf.m.planes[i].length;
camera.buffers[n_buffers].start[i] = mmap(NULL , buf.m.planes[i].length,
PROT_READ | PROT_WRITE, \
MAP_SHARED , camera.videofd, \
buf.m.planes[i].m.mem_offset);
camera_dbg(" map buffer index: %d, mem: %p, len: %x, offset: %x\n",
n_buffers, camera.buffers[n_buffers].start[i],buf.m.planes[i].length,
buf.m.planes[i].m.mem_offset);
}
free(buf.m.planes);
}else{
camera.buffers[n_buffers].length[0] = buf.length;
camera.buffers[n_buffers].start[0] = mmap(NULL , buf.length,
PROT_READ | PROT_WRITE, \
MAP_SHARED , camera.videofd, \
buf.m.offset);
camera_dbg(" map buffer index: %d, mem: %p, len: %x, offset: %x\n", \
n_buffers, camera.buffers[n_buffers].start[0],buf.length,buf.m.offset);
}
}
/* 11.Exchange a buffer with the driver */
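/* queue every buffer once before VIDIOC_STREAMON so the driver has somewhere to store the first frames */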
for(n_buffers = 0; n_buffers < req.count; n_buffers++) {
memset(&buf, 0, sizeof(struct v4l2_buffer));
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory= V4L2_MEMORY_MMAP;
buf.index= n_buffers;
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE){
buf.length = camera.nplanes;
buf.m.planes = (struct v4l2_plane *)calloc(buf.length, sizeof(struct v4l2_plane));
}
if (ioctl(camera.videofd, VIDIOC_QBUF, &buf) == -1) {
camera_err(" VIDIOC_QBUF error\n");
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
free(camera.buffers);
close(camera.videofd);
return -1;
}
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE){
free(buf.m.planes);
buf.m.planes = NULL; /* avoid a double free on the EXIT error path */
}
}
enum v4l2_buf_type type;
/* streamon */
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl(camera.videofd, VIDIOC_STREAMON, &type) == -1) {
camera_err(" VIDIOC_STREAMON error! %s\n",strerror(errno));
goto EXIT;
}else
camera_print(" stream on succeed\n");
int np = 0;
int ret = -1;
int width;
int height;
char source_data_path[64];
struct timeval tv;
fd_set fds;
memset(&buf, 0, sizeof(struct v4l2_buffer));
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE){
buf.length = camera.nplanes;
buf.m.planes = (struct v4l2_plane *)calloc(camera.nplanes, sizeof(struct v4l2_plane));
}
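/* capture loop: wait for a frame with select(), dequeue it, save MJPEG/H264 data to a file, then re-queue the buffer */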
while (np < camera.photo_num)
{
camera_print(" capture num is [%d]\n",np);
/* select() modifies fds (and tv on Linux), so re-arm them on every iteration */
FD_ZERO(&fds);
FD_SET(camera.videofd, &fds);
tv.tv_sec = 2;
tv.tv_usec = 0;
ret = select(camera.videofd+1,&fds,NULL,NULL, &tv);
if (ret == -1){
camera_err(" select error\n");
continue;
}else if (ret == 0){
camera_err(" select timeout,end capture thread!\n");
break;
}
/* dqbuf */
ret = ioctl(camera.videofd, VIDIOC_DQBUF, &buf);
if (ret == 0)
camera_dbg("*****DQBUF[%d] FINISH*****\n",buf.index);
else{
camera_err("****DQBUF FAIL*****\n");
break; /* nothing was dequeued, so there is no buffer to save or re-queue */
}
/* check the data buf for byte alignment */
if(camera.pixelformat == V4L2_PIX_FMT_YUV422P || camera.pixelformat == V4L2_PIX_FMT_YUYV
|| camera.pixelformat == V4L2_PIX_FMT_YVYU || camera.pixelformat == V4L2_PIX_FMT_UYVY
|| camera.pixelformat == V4L2_PIX_FMT_VYUY){
if(camera.win_width*camera.win_height*2 < buf.bytesused){
width = ALIGN_16B(camera.win_width);
height = ALIGN_16B(camera.win_height);
}else{
width = camera.win_width;
height = camera.win_height;
}
}else if(camera.pixelformat == V4L2_PIX_FMT_YUV420 || camera.pixelformat == V4L2_PIX_FMT_YVU420
|| camera.pixelformat == V4L2_PIX_FMT_NV16 || camera.pixelformat == V4L2_PIX_FMT_NV61
|| camera.pixelformat == V4L2_PIX_FMT_NV12 || camera.pixelformat == V4L2_PIX_FMT_NV21){
if(camera.win_width*camera.win_height*1.5 < buf.bytesused){
width = ALIGN_16B(camera.win_width);
height = ALIGN_16B(camera.win_height);
}else{
width = camera.win_width;
height = camera.win_height;
}
}else{
width = camera.win_width;
height = camera.win_height;
}
/* YUV data */
if(camera.pixelformat != V4L2_PIX_FMT_MJPEG && camera.pixelformat != V4L2_PIX_FMT_H264){
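/* raw YUV frames are only reported here; this demo writes files only for MJPEG/H264 data */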
camera_dbg("yuv data type!\n");
}else{ /* MJPEG or H264 */
sprintf(source_data_path, "%s/%s%d.%s", "./", "source_data", np+1, get_format_name(camera.pixelformat));
save_frame_to_file(source_data_path, camera.buffers[buf.index].start[0], buf.bytesused);
}
/* qbuf */
if (ioctl(camera.videofd, VIDIOC_QBUF, &buf) == 0)
camera_dbg("************QBUF[%d] FINISH**************\n",buf.index);
else
camera_err("*****QBUF FAIL*****\n");
np++;
}
/* streamoff */
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl(camera.videofd, VIDIOC_STREAMOFF, &type) == -1)
camera_err(" VIDIOC_STREAMOFF error! %s\n",strerror(errno));
EXIT:
/* munmap camera->buffers */
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE){
for (int i = 0; i < camera.buf_count; ++i)
for (int j = 0; j < camera.nplanes; j++)
munmap(camera.buffers[i].start[j], camera.buffers[i].length[j]);
}else{
for(int i=0; i < camera.buf_count; i++)
munmap(camera.buffers[i].start[0],camera.buffers[i].length[0]);
}
if(camera.driver_type == V4L2_CAP_VIDEO_CAPTURE_MPLANE)
free(buf.m.planes);
free(camera.buffers);
close(camera.videofd);
return 0;
}