Capturing a still image on Android directly with V4L2



The goal is to take a single picture directly through V4L2 on Android (more precisely, through the Linux kernel underneath Android) rather than through the Android camera stack. There are plenty of tutorials on capturing video data with V4L2, and the official V4L2 example code is also a good reference. The code below captures one frame and saves it.

 
  
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>

#include "jpeglib.h"     // libjpeg (ported to Android) is used to convert the YUV frame into a JPG image

//#define ANDROID_ENV
#ifdef ANDROID_ENV
#define LOG LOGV
#else
#define LOG printf
#endif
#define CAMERA_DEVICE "/dev/video0"
#define CAPTURE_FILE "frame.yuv420"
#define VIDEO_WIDTH 720
#define VIDEO_HEIGHT 480
#define VIDEO_FORMAT V4L2_PIX_FMT_YUV420   // Output pixel format. If the camera can deliver JPEG
                                           // (V4L2_PIX_FMT_MJPEG / V4L2_PIX_FMT_JPEG), the captured
                                           // buffer can be written straight to a .jpg file; a YUV
                                           // format has to be converted first (the conversion is
                                           // covered in the next post; here the YUV frame is simply
                                           // saved to a file). Other common formats:
                                           // V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_RGB32
#define BUFFER_COUNT 4

struct fimc_buffer {
    int length;
    void *start;
} framebuf[BUFFER_COUNT];

// Converts one YUV frame into a JPG image (a sketch of a possible implementation is given after main() below).
int write_JPEG_file(const char *filename, unsigned char *yuvData, int quality, int image_width, int image_height);

int main()
{
    int i, ret;

    // Open Device
    int fd;
    fd = open(CAMERA_DEVICE, O_RDWR, 0);
    if (fd < 0) {
        LOG("Open %s failed\n", CAMERA_DEVICE);
        return -1;
    }

    // Query Capability
    struct v4l2_capability cap;
    ret = ioctl(fd, VIDIOC_QUERYCAP, &cap);
    if (ret < 0) {
        LOG("VIDIOC_QUERYCAP failed (%d)\n", ret);
        return ret;
    }

    // Print capability information
    LOG("Capability Informations:\n");
    LOG(" driver: %s\n", cap.driver);
    LOG(" card: %s\n", cap.card);
    LOG(" bus_info: %s\n", cap.bus_info);
    LOG(" version: %08X\n", cap.version);
    LOG(" capabilities: %08X\n", cap.capabilities);

    // Enumerate the pixel formats the driver supports
    struct v4l2_fmtdesc fmtdesc;
    memset(&fmtdesc, 0, sizeof(fmtdesc));
    fmtdesc.index = 0;
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    LOG("Support format:\n");
    while (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) != -1) {
        LOG("\t%d.%s\n", fmtdesc.index + 1, fmtdesc.description);
        fmtdesc.index++;
    }

    // Set Stream Format
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = VIDEO_WIDTH;
    fmt.fmt.pix.height = VIDEO_HEIGHT;
    fmt.fmt.pix.pixelformat = VIDEO_FORMAT;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    ret = ioctl(fd, VIDIOC_S_FMT, &fmt);
    if (ret < 0) {
        LOG("VIDIOC_S_FMT failed (%d)\n", ret);
        return ret;
    }

    // Get Stream Format
    ret = ioctl(fd, VIDIOC_G_FMT, &fmt);
    if (ret < 0) {
        LOG("VIDIOC_G_FMT failed (%d)\n", ret);
        return ret;
    }

    // Print Stream Format
    LOG("Stream Format Informations:\n");
    LOG(" type: %d\n", fmt.type);
    LOG(" width: %d\n", fmt.fmt.pix.width);
    LOG(" height: %d\n", fmt.fmt.pix.height);
    char fmtstr[8];
    memset(fmtstr, 0, 8);
    memcpy(fmtstr, &fmt.fmt.pix.pixelformat, 4);
    LOG(" pixelformat: %s\n", fmtstr);
    LOG(" field: %d\n", fmt.fmt.pix.field);
    LOG(" bytesperline: %d\n", fmt.fmt.pix.bytesperline);
    LOG(" sizeimage: %d\n", fmt.fmt.pix.sizeimage);
    LOG(" colorspace: %d\n", fmt.fmt.pix.colorspace);
    LOG(" priv: %d\n", fmt.fmt.pix.priv);
    LOG(" raw_data: %s\n", fmt.fmt.raw_data);

    // Request buffers
    struct v4l2_requestbuffers reqbuf;
    memset(&reqbuf, 0, sizeof(reqbuf));
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.memory = V4L2_MEMORY_MMAP;
    reqbuf.count = BUFFER_COUNT;
    ret = ioctl(fd, VIDIOC_REQBUFS, &reqbuf);
    if (ret < 0) {
        LOG("VIDIOC_REQBUFS failed (%d)\n", ret);
        return ret;
    }
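
    // Note: the driver is allowed to grant fewer buffers than requested;
    // reqbuf.count holds the number actually allocated. A fuller version
    // would loop over reqbuf.count instead of BUFFER_COUNT below.
    if (reqbuf.count < BUFFER_COUNT) {
        LOG("Only %d buffers granted (requested %d)\n", reqbuf.count, BUFFER_COUNT);
    }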

    // Queue buffers
    struct v4l2_buffer buf;
    for (i = 0; i < BUFFER_COUNT; i++) {
        // Query buffer
        memset(&buf, 0, sizeof(buf));
        buf.index = i;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        ret = ioctl(fd, VIDIOC_QUERYBUF, &buf);
        if (ret < 0) {
            LOG("VIDIOC_QUERYBUF (%d) failed (%d)\n", i, ret);
            return ret;
        }

        // mmap buffer
        framebuf[i].length = buf.length;
        framebuf[i].start = mmap(0, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
        if (framebuf[i].start == MAP_FAILED) {
            LOG("mmap (%d) failed: %s\n", i, strerror(errno));
            return -1;
        }

        // Queue buffer
        ret = ioctl(fd, VIDIOC_QBUF, &buf);
        if (ret < 0) {
            LOG("VIDIOC_QBUF (%d) failed (%d)\n", i, ret);
            return -1;
        }

        LOG("Frame buffer %d: address=%p, length=%d\n", i, framebuf[i].start, framebuf[i].length);
    }

    // Stream On
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(fd, VIDIOC_STREAMON, &type);
    if (ret < 0) {
        LOG("VIDIOC_STREAMON failed (%d)\n", ret);
        return ret;
    }

    // Get one frame (blocks until the driver has filled a buffer)
    ret = ioctl(fd, VIDIOC_DQBUF, &buf);
    if (ret < 0) {
        LOG("VIDIOC_DQBUF failed (%d)\n", ret);
        return ret;
    }

    // Process the frame
    FILE *fp = fopen(CAPTURE_FILE, "wb");
    if (fp == NULL) {
        LOG("open frame data file failed\n");
        return -1;
    }
    fwrite(framebuf[buf.index].start, 1, buf.length, fp);   // save the raw YUV data to the file
    fclose(fp);
    LOG("Capture one frame saved in %s\n", CAPTURE_FILE);

    // Convert the same frame into a JPG image
    write_JPEG_file("test.jpg", (unsigned char *)framebuf[buf.index].start, 100, VIDEO_WIDTH, VIDEO_HEIGHT);
    LOG("Capture one frame saved in test.jpg\n");

    // Re-queue the buffer
    ret = ioctl(fd, VIDIOC_QBUF, &buf);
    if (ret < 0) {
        LOG("VIDIOC_QBUF failed (%d)\n", ret);
        return ret;
    }

    // Release the resources
    for (i = 0; i < BUFFER_COUNT; i++) {
        munmap(framebuf[i].start, framebuf[i].length);
    }

    close(fd);
    LOG("Camera test Done.\n");
    return 0;
}
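
The follow-up post covers the YUV-to-JPG conversion in detail. For reference, here is a minimal sketch of what write_JPEG_file could look like. It is only a sketch, not the implementation from that post, and it assumes the buffer is planar YUV420 (I420) as configured above, that width and height are multiples of 16 (720x480 is), and that the ported libjpeg provides the standard raw-data compression interface. It is meant to sit in the same source file as main().

// Minimal sketch: compress a planar YUV420 (I420) buffer to a JPEG file using
// libjpeg's raw-data interface; the already-subsampled planes are fed directly,
// so no YUV-to-RGB conversion is needed. Assumes width and height are multiples of 16.
#include <stdio.h>
#include "jpeglib.h"

int write_JPEG_file(const char *filename, unsigned char *yuvData, int quality,
                    int image_width, int image_height)
{
    struct jpeg_compress_struct cinfo;
    struct jpeg_error_mgr jerr;
    JSAMPROW y[16], cb[8], cr[8];              // row pointers for one 16-line MCU band
    JSAMPARRAY planes[3] = { y, cb, cr };
    unsigned char *y_plane  = yuvData;                                      // I420 layout:
    unsigned char *cb_plane = y_plane + image_width * image_height;         // Y plane, then Cb,
    unsigned char *cr_plane = cb_plane + image_width * image_height / 4;    // then Cr
    FILE *outfile;
    int j, k;

    if ((outfile = fopen(filename, "wb")) == NULL)
        return -1;

    cinfo.err = jpeg_std_error(&jerr);
    jpeg_create_compress(&cinfo);
    jpeg_stdio_dest(&cinfo, outfile);

    cinfo.image_width = image_width;
    cinfo.image_height = image_height;
    cinfo.input_components = 3;
    cinfo.in_color_space = JCS_YCbCr;
    jpeg_set_defaults(&cinfo);
    jpeg_set_quality(&cinfo, quality, TRUE);

    cinfo.raw_data_in = TRUE;                  // we supply pre-subsampled planes ourselves
    cinfo.comp_info[0].h_samp_factor = 2;      // Y at full resolution
    cinfo.comp_info[0].v_samp_factor = 2;
    cinfo.comp_info[1].h_samp_factor = 1;      // Cb at half resolution in both directions
    cinfo.comp_info[1].v_samp_factor = 1;
    cinfo.comp_info[2].h_samp_factor = 1;      // Cr at half resolution in both directions
    cinfo.comp_info[2].v_samp_factor = 1;

    jpeg_start_compress(&cinfo, TRUE);

    // Feed 16 luma lines (8 chroma lines) per call to jpeg_write_raw_data().
    while (cinfo.next_scanline < cinfo.image_height) {
        for (j = 0; j < 16; j++)
            y[j] = y_plane + (cinfo.next_scanline + j) * image_width;
        for (k = 0; k < 8; k++) {
            cb[k] = cb_plane + (cinfo.next_scanline / 2 + k) * (image_width / 2);
            cr[k] = cr_plane + (cinfo.next_scanline / 2 + k) * (image_width / 2);
        }
        jpeg_write_raw_data(&cinfo, planes, 16);
    }

    jpeg_finish_compress(&cinfo);
    jpeg_destroy_compress(&cinfo);
    fclose(outfile);
    return 0;
}

The intermediate frame.yuv420 file can also be checked directly with a raw-video player, for example: ffplay -f rawvideo -pixel_format yuv420p -video_size 720x480 frame.yuv420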
