The YUV/MJPEG parsing code that had worked fine stopped working after switching to a new core board: OpenCV went from 3.4.6 to 4.5.5, and the Mat/CvMat/IplImage type conversions no longer build, because OpenCV 4 dropped the old C API.
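Since IplImage, CvMat, cvDecodeImage and cvarrToMat are gone in OpenCV 4, the decode step has to move to the C++ API. Here is a minimal sketch of the equivalent path (decodeMjpegFrame is a hypothetical helper; buffer/bytesused are assumed to hold one complete MJPEG frame straight from VIDIOC_DQBUF):

#include <opencv2/imgcodecs.hpp>
#include <opencv2/imgproc.hpp>
#include <QImage>

// OpenCV 4 replacement for the cvMat/cvDecodeImage/IplImage chain in the old code below
static QImage decodeMjpegFrame(const unsigned char *buffer, size_t bytesused)
{
    // wrap the compressed bytes without copying, then decode to BGR
    cv::Mat raw(1, (int)bytesused, CV_8UC1, (void *)buffer);
    cv::Mat bgr = cv::imdecode(raw, cv::IMREAD_COLOR);
    if (bgr.empty())
        return QImage();
    cv::rotate(bgr, bgr, cv::ROTATE_180);
    cv::cvtColor(bgr, bgr, cv::COLOR_BGR2RGB);
    // deep-copy so the QImage outlives the temporary Mat
    return QImage(bgr.data, bgr.cols, bgr.rows, (int)bgr.step,
                  QImage::Format_RGB888).copy();
}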
//old version
#include "camera_thread.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core/core_c.h>
#include <opencv2/imgcodecs/imgcodecs_c.h>
#include <QImage>
#include <QImageReader>
#include <QDebug>
//#include "opencv2/core/types_c.h"
#include "device.h"
#include "qtimer.h"
#include "camera2.h"
#include <numeric>
int init_v4l2(void);
int v4l2_grab(void);
#define IMAGEWIDTH 640
#define IMAGEHEIGHT 480
#define FILE_VIDEO1 "/dev/video10"
#define TRUE 1
#define FALSE 0
const char *deviceid;
using namespace cv;
QImageReader reader;
video_thread::video_thread():QThread()
{
quit_flag = false;
}
video_thread::~video_thread()
{
this->quit();
quit_flag = true;
show_picture_flag = false;
this->wait();
}
void video_thread::run()
{
int close_ret = 0;
if(device_video==1){
deviceid= "/dev/video0";
}else{
deviceid= "/dev/video2";
}
if(init_v4l2() == FALSE){
quit_flag = true;
exit(1);
}
if(v4l2_grab() == FALSE){
quit_flag = true;
exit(2);
}
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
isopen = true;
IplImage* img;
IplImage* img1;
CvMat cvmat;
double t;
QImage qimg;
while(!quit_flag)
{
msleep(1);
// not show picture, continue next loop.
if ( !show_picture_flag )
continue;
t = (double)cvGetTickCount();
ioctl(fd,VIDIOC_DQBUF,&buf);
buf.index = 0;
cvmat =cvMat(IMAGEHEIGHT,IMAGEWIDTH,CV_8UC3,(void*)buffer);//CV_8UC3
img = cvDecodeImage(&cvmat,1);
cv::Mat MImg=cv::cvarrToMat(img);//Mat
// MImg=MImg(cv::Rect(0,0,630,470));
cv::rotate(MImg,MImg,cv::ROTATE_180);
IplImage iplImage1(MImg); // Mat -> IplImage conversion; removed in OpenCV 4
img1=&iplImage1;
qimg = QImage((unsigned char*)img1->imageData,img1->width,img1->height,img1->widthStep,QImage::Format_RGB888);
if(!img) qDebug("CAM2 No img\n");
ioctl(fd,VIDIOC_QBUF,&buf);
t=(double)cvGetTickCount()-t;
if(device_video==1){
emit image_data(qimg.copy()); // copy() detaches from the frame buffer
}
if(device_video==2){
emit image_data1(qimg.copy());
}
cvReleaseImage(&img); // release the decoded frame only after the deep copy
}
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
close_ret = ioctl(fd,VIDIOC_STREAMOFF,&type);
close_ret = close( fd );
if ( close_ret == 0 )
{
qDebug("close success!");
}
}
int video_thread::init_v4l2(void){
if ((fd = open(deviceid, O_RDWR)) == -1){
return FALSE;
}
if(fcntl(fd,F_SETFD, FD_CLOEXEC) == -1)
{
// non-fatal, carry on
}
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == -1){
return FALSE;
}
fmtdesc.index = 0;
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
// walk the supported formats (the result is not used here)
while(ioctl(fd,VIDIOC_ENUM_FMT,&fmtdesc) != -1){
fmtdesc.index++;
}
//set fmt
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = IMAGEWIDTH;
fmt.fmt.pix.height = IMAGEHEIGHT;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; // or V4L2_PIX_FMT_YUYV
fmt.fmt.pix.field = V4L2_FIELD_NONE;
if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1){
// if (ioctl(fd, VIDIOC_TRY_FMT, &fmt) == -1){
return FALSE;
}
if(ioctl(fd,VIDIOC_G_FMT,&fmt) == -1){
return FALSE;
}
return TRUE;
}
int video_thread::v4l2_grab(void){
//struct v4l2_requestbuffers req = {0};
//4 request capture buffers (count = 1 here)
req.count = 1;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1)
{
return FALSE;
}
//5 mmap the driver buffers into user space (the old malloc here leaked; mmap supplies the memory)
unsigned int n_buffers;
for(n_buffers = 0;n_buffers < req.count; n_buffers++){
//struct v4l2_buffer buf = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if(ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1){
return FALSE;
}
buffer = (uchar*)mmap (NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
if(buffer == MAP_FAILED){
return FALSE;
}
}
//6 queue
for(n_buffers = 0;n_buffers < req.count; n_buffers++){
buf.index = n_buffers;
if(ioctl(fd, VIDIOC_QBUF, &buf) == -1){
return FALSE;
}
}
//7 turn on streaming
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl(fd, VIDIOC_STREAMON, &type) == -1){
return FALSE;
}
return TRUE;
}
So I switched to a different way of opening the camera.
Header file:
#ifndef MajorImageProcessingThread_H
#define MajorImageProcessingThread_H
#include <QThread>
#include <QImage>
#include <QPixmap>
#include <QDebug>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <sys/time.h>
#include <linux/videodev2.h>
#include "LPF_V4L2.h"
#define IMAGEWIDTH 640
#define IMAGEHEIGHT 480
class MajorImageProcessingThread : public QThread
{
Q_OBJECT
public:
MajorImageProcessingThread();
QImage majorImage;
void stop();
void init(int index);
/* frame descriptor: one captured frame plus its byte count */
typedef struct Frame_Buffer{
unsigned char buf[IMAGEWIDTH*IMAGEHEIGHT*3];
int length;
}FrameBuffer;
///
struct buffer{
void *start;
unsigned int length;
}*buffers;
int buffers_length;
struct v4l2_capability cap;
struct v4l2_fmtdesc fmtdesc;
struct v4l2_frmsizeenum frmsizeenum;
struct v4l2_format format;
struct v4l2_queryctrl queryctrl;
struct v4l2_requestbuffers reqbuf;
struct v4l2_buffer buffer;
struct v4l2_streamparm streamparm;
//user-control IDs
__u32 brightness_id; //brightness
__u32 contrast_id ; //contrast
__u32 saturation_id ; //saturation
__u32 hue_id ; //hue
__u32 white_balance_temperature_auto_id; //auto white-balance temperature
__u32 white_balance_temperature_id ; //white-balance temperature
__u32 gamma_id ; //gamma
__u32 power_line_frequency_id ; //power-line frequency
__u32 sharpness_id; //sharpness
__u32 backlight_compensation_id ; //backlight compensation
//extended camera-control IDs
__u32 exposure_auto_id ; //auto exposure
__u32 exposure_absolute_id ; //absolute exposure
char runningDev[15]="";
char devName[15] ="";
char camName[32] ="";
char devFmtDesc[4]="" ;
int fd;
int videoIsRun ;
unsigned char *rgb24 ;
int WIDTH, HEIGHT;
int majorindex;
bool stopped;
bool isopen;
QImage img;
int device;
int deviceIsOpen ;
void init_value();//init
int LPF_GetDeviceCount();
char *LPF_GetDeviceName(int index);
char *LPF_GetCameraName(int index);
int LPF_StartRun(int index);
// int LPF_GetFrame();
int LPF_GetFrame(FrameBuffer *framebuf);
int LPF_StopRun();
char *LPF_GetDevFmtDesc(int index);
int LPF_GetDevFmtWidth();
int LPF_GetDevFmtHeight();
int LPF_GetDevFmtSize();
int LPF_GetDevFmtBytesLine();
int LPF_GetResolutinCount();
int LPF_GetResolutionWidth(int index);
int LPF_GetResolutionHeight(int index);
int LPF_GetCurResWidth();
int LPF_GetCurResHeight();
int init_format();
int init_formatNew();
int v4l2_init_buffer();
void StartVideoPrePareNew();
void StartVideoPrePare();
void StartVideoStream();
void EndVideoStream();
void EndVideoStreamClear();
int test_device_exist(char *devName);
void LPF_GetDevControlAll();
///
protected:
void run();
private:
signals:
void SendMajorImageProcessing(QImage image, int result);
// void SendMajorImageProcessing(QPixmap pix, int result);
};
#endif // MajorImageProcessingThread_H
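For context, this is roughly how the pieces are meant to fit together from the UI side (a sketch only; the actual wiring lives in cam_recv further down, and the variable name cam is hypothetical):

// hypothetical startup sequence for one camera
MajorImageProcessingThread *cam = new MajorImageProcessingThread();
cam->init(0);                  // device index; resets the control IDs via init_value()
if (cam->LPF_StartRun(0) == 0) // open the device, mmap buffers, start streaming
    cam->start();              // QThread::start() -> run() loop emits SendMajorImageProcessing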
Source file:
#include "majorimageprocessingthread.h"
#define IMAGEWIDTH 640
#define IMAGEHEIGHT 480
#define TRUE 1
#define FALSE 0
#define FRAMEBUFFER_COUNT 4
MajorImageProcessingThread::MajorImageProcessingThread()
{
stopped = false;
majorindex = -1;
}
void MajorImageProcessingThread::stop()
{
stopped = true;
}
void MajorImageProcessingThread::init(int index)
{
stopped = false;
majorindex = index;
qDebug()<<"MajorImageProcessingThread::init";
init_value();
qDebug()<<"MajorImageProceddddssingThread::init";
}
void MajorImageProcessingThread::run()
{
if(majorindex != -1)
{
while(!stopped)
{
msleep(1000/30);
FrameBuffer frame;
QImage img;
QPixmap pix;
int ret = LPF_GetFrame(&frame);
if(ret == 0)
{
int WV = LPF_GetCurResWidth();
int HV = LPF_GetCurResHeight();
// img = QImage(rgb24, WV, HV, QImage::Format_RGB888);
pix.loadFromData(frame.buf, frame.length);
img= pix.toImage();
}
emit SendMajorImageProcessing(img, ret);
}
}
}
///
void MajorImageProcessingThread::init_value(){
fd = -1;
videoIsRun = -1;
deviceIsOpen = -1;
rgb24 = NULL;
//user-control IDs
brightness_id = -1; //brightness
contrast_id = -1; //contrast
saturation_id = -1; //saturation
hue_id = -1; //hue
white_balance_temperature_auto_id = -1; //auto white-balance temperature
white_balance_temperature_id = -1; //white-balance temperature
gamma_id = -1; //gamma
power_line_frequency_id = -1; //power-line frequency
sharpness_id = -1; //sharpness
backlight_compensation_id = -1; //backlight compensation
//extended camera-control IDs
exposure_auto_id = -1; //auto exposure
exposure_absolute_id = -1;
}
//YUYV helpers: convert one YUV pixel / one packed YUYV buffer to RGB888
static int convert_yuv_to_rgb_pixel(int y, int u, int v)
{
unsigned int pixel32 = 0;
unsigned char *pixel = (unsigned char *)&pixel32;
int r, g, b;
r = y + (1.370705 * (v-128));
g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
b = y + (1.732446 * (u-128));
if(r > 255) r = 255;
if(g > 255) g = 255;
if(b > 255) b = 255;
if(r < 0) r = 0;
if(g < 0) g = 0;
if(b < 0) b = 0;
pixel[0] = r ;
pixel[1] = g ;
pixel[2] = b ;
return pixel32;
}
static int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
unsigned int in, out = 0;
unsigned int pixel_16;
unsigned char pixel_24[3];
unsigned int pixel32;
int y0, u, y1, v;
for(in = 0; in < width * height * 2; in += 4)
{
pixel_16 =
yuv[in + 3] << 24 |
yuv[in + 2] << 16 |
yuv[in + 1] << 8 |
yuv[in + 0];
y0 = (pixel_16 & 0x000000ff);
u = (pixel_16 & 0x0000ff00) >> 8;
y1 = (pixel_16 & 0x00ff0000) >> 16;
v = (pixel_16 & 0xff000000) >> 24;
pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
pixel_24[0] = (pixel32 & 0x000000ff);
pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
rgb[out++] = pixel_24[0];
rgb[out++] = pixel_24[1];
rgb[out++] = pixel_24[2];
pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
pixel_24[0] = (pixel32 & 0x000000ff);
pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
rgb[out++] = pixel_24[0];
rgb[out++] = pixel_24[1];
rgb[out++] = pixel_24[2];
}
return 0;
}
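For reference, this is how the converter would be wired to a captured frame when the device really is in YUYV mode (a sketch only; it reuses the class members that appear in the code below, and rgb is a hypothetical local):

// hypothetical call site, only valid after VIDIOC_S_FMT set V4L2_PIX_FMT_YUYV
// each YUYV frame is WIDTH*HEIGHT*2 bytes of packed Y0 U Y1 V
unsigned char *rgb = (unsigned char *)malloc(WIDTH * HEIGHT * 3);
convert_yuv_to_rgb_buffer((unsigned char *)buffers[buffer.index].start, rgb, WIDTH, HEIGHT);
QImage img(rgb, WIDTH, HEIGHT, WIDTH * 3, QImage::Format_RGB888);
emit SendMajorImageProcessing(img.copy(), 0); // deep-copy before freeing the buffer
free(rgb);

Feeding this converter MJPEG bytes instead of YUYV is exactly what causes the corruption described at the end of this post.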
void MajorImageProcessingThread::StartVideoPrePare()
{
init_format();
qDebug("当前视频帧大小<%d * %d>, 颜色空间:%d\n", format.fmt.pix.width, format.fmt.pix.height,format.fmt.pix.colorspace);
///
//request frame buffers
memset (&reqbuf, 0, sizeof (reqbuf));
reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
reqbuf.memory = V4L2_MEMORY_MMAP;
reqbuf.count = 4;
if (-1 == ioctl (fd, VIDIOC_REQBUFS, &reqbuf)) {
if (errno == EINVAL){
qDebug()<< "Video capturing or mmap-streaming is not supported";
printf ("Video capturing or mmap-streaming is not supported\n");
}
else{
perror ("VIDIOC_REQBUFS");
qDebug()<< "buffers is Video capturing or mmap-streaming is not supported";
}
qDebug()<< "buffers error";
return;
}
//allocate the per-buffer bookkeeping array
// buffers = calloc (reqbuf.count, sizeof (*buffers));
buffers = static_cast<struct buffer*>(malloc(sizeof (*buffers)*reqbuf.count));//release
if(buffers == NULL){
perror("buffers is NULL");
qDebug()<< "buffers is NULL";
return;
}
//mmap each buffer into user space
int i;
for (i = 0; i < (int)reqbuf.count; i++) {
memset (&buffer, 0, sizeof (buffer));
buffer.type = reqbuf.type;
buffer.memory = V4L2_MEMORY_MMAP;
buffer.index = i;
if (-1 == ioctl (fd, VIDIOC_QUERYBUF, &buffer)) {
qDebug()<< "perror VIDIOC_QUERYBUFVIDIOC_QUERYBUF buffers is NULL";
perror ("VIDIOC_QUERYBUF");
return;
}
buffers[i].length = buffer.length;
buffers[i].start = mmap (NULL, buffer.length,
PROT_READ | PROT_WRITE,
MAP_SHARED,
fd, buffer.m.offset);
if (MAP_FAILED == buffers[i].start) {
qDebug()<< "perror MAP_FAILED MAP_FAILED is MAP_FAILED";
perror ("mmap");
return;
}
}
//queue all buffers so they are ready when the video stream starts
unsigned int ii;
for(ii = 0; ii < reqbuf.count; ii++){
buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buffer.memory = V4L2_MEMORY_MMAP;
buffer.index = ii;
if (ioctl(fd,VIDIOC_QBUF,&buffer)==-1){
qDebug()<< "perror VIDIOC_QBUF failed";
perror("VIDIOC_QBUF failed");
}
}
WIDTH = LPF_GetCurResWidth();
HEIGHT = LPF_GetCurResHeight();
qDebug()<<"WIDTH"< 0)
return "";
memset(camName, 0, sizeof(camName));
char devname[15] = "";
strcpy(devname, LPF_GetDeviceName(index));
int fd = open(devname, O_RDWR);
if(ioctl(fd, VIDIOC_QUERYCAP, &cap) != -1)
{
strcpy(camName, (char *)cap.card);
}else{
qDebug()<<"error:LPF_GetCameraName";
}
close(fd);
qDebug()<<"LPF_GetCameraName"< 0 ) {//weiyl
qDebug()<<"LPF_StopRun";
// LPF_StopRun();
return -1;
}
char *devname = LPF_GetDeviceName(index);
fd = open(devname, O_RDWR);
if(fd == -1)
return -1;
deviceIsOpen = 1;
LPF_GetCameraName(index);
LPF_GetDevControlAll();
LPF_GetDevFmtDesc(index);
StartVideoPrePare();
// StartVideoPrePareNew();
StartVideoStream();
strcpy(runningDev, devname);
videoIsRun = 1;
return 0;
}
int MajorImageProcessingThread::LPF_GetFrame(FrameBuffer *framebuf)
{
if(videoIsRun > 0)
{
fd_set fds;
struct timeval tv;
int r;
FD_ZERO (&fds);
FD_SET (fd, &fds);
/* Timeout. */
tv.tv_sec = 7;
tv.tv_usec = 0;
r = select (fd + 1, &fds, NULL, NULL, &tv);
if (0 == r)
return -1;
else if(-1 == r)
return errno;
memset(&buffer, 0, sizeof(buffer));
buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buffer.memory = V4L2_MEMORY_MMAP;
if (ioctl(fd, VIDIOC_DQBUF, &buffer) == -1) {
perror("GetFrame VIDIOC_DQBUF Failed");
qDebug()<<"VIDIOC_DQBUF Failed";
return errno;
}
else
{
//copy the bytes actually used out of the driver buffer into framebuf
memcpy(framebuf->buf,(char *)buffers[buffer.index].start,buffer.bytesused);//bytesused = bytes of the buffer actually filled
framebuf->length = buffer.bytesused;
// convert_yuv_to_rgb_buffer((unsigned char*)buffers[buffer.index].start, rgb24, WIDTH, HEIGHT);// produced corrupted frames on MJPEG
if (ioctl(fd, VIDIOC_QBUF, &buffer) < 0) {
perror("GetFrame VIDIOC_QBUF Failed");
perror("GetFrame VIDIOC_QBUF Failed");
return errno;
}
return 0;
}
}
return 0;
}
int MajorImageProcessingThread::LPF_StopRun()
{
if(videoIsRun > 0)
{
EndVideoStream();
EndVideoStreamClear();
}
memset(runningDev, 0, sizeof(runningDev));
videoIsRun = -1;
deviceIsOpen = -1;
if(close(fd) != 0)
return -1;
return 0;
}
char *MajorImageProcessingThread::LPF_GetDevFmtDesc(int index)
{
memset(devFmtDesc, 0, sizeof(devFmtDesc));
fmtdesc.index=index;
fmtdesc.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) != -1)
{
char fmt[5] = "";
sprintf(fmt, "%c%c%c%c",
(__u8)(fmtdesc.pixelformat&0XFF),
(__u8)((fmtdesc.pixelformat>>8)&0XFF),
(__u8)((fmtdesc.pixelformat>>16)&0XFF),
(__u8)((fmtdesc.pixelformat>>24)&0XFF));
strncpy(devFmtDesc, fmt, 4);
}
return devFmtDesc;
}
//image-format attribute getters
int MajorImageProcessingThread::LPF_GetDevFmtWidth()
{
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl (fd, VIDIOC_G_FMT, &format) == -1)
{
perror("GetDevFmtWidth:");
return -1;
}
return format.fmt.pix.width;
}
int MajorImageProcessingThread::LPF_GetDevFmtHeight()
{
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl (fd, VIDIOC_G_FMT, &format) == -1)
{
perror("GetDevFmtHeight:");
return -1;
}
return format.fmt.pix.height;
}
int MajorImageProcessingThread::LPF_GetDevFmtSize()
{
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl (fd, VIDIOC_G_FMT, &format) == -1)
{
perror("GetDevFmtSize:");
return -1;
}
return format.fmt.pix.sizeimage;
}
int MajorImageProcessingThread::LPF_GetDevFmtBytesLine()
{
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl (fd, VIDIOC_G_FMT, &format) == -1)
{
perror("GetDevFmtBytesLine:");
return -1;
}
return format.fmt.pix.bytesperline;
}
//resolution enumeration
int MajorImageProcessingThread::LPF_GetResolutinCount()
{
fmtdesc.index = 0;
fmtdesc.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == -1)
return -1;
frmsizeenum.pixel_format = fmtdesc.pixelformat;
int i = 0;
for(i = 0; ; i++)
{
frmsizeenum.index = i;
if(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) == -1)
break;
}
return i;
}
int MajorImageProcessingThread::LPF_GetResolutionWidth(int index)
{
fmtdesc.index = 0;
fmtdesc.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == -1)
return -1;
frmsizeenum.pixel_format = fmtdesc.pixelformat;
frmsizeenum.index = index;
if(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) != -1)
return frmsizeenum.discrete.width;
else
return -1;
}
int MajorImageProcessingThread::LPF_GetResolutionHeight(int index)
{
fmtdesc.index = 0;
fmtdesc.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == -1)
return -1;
frmsizeenum.pixel_format = fmtdesc.pixelformat;
frmsizeenum.index = index;
if(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) != -1)
return frmsizeenum.discrete.height;
else
return -1;
}
int MajorImageProcessingThread::LPF_GetCurResWidth()
{
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_G_FMT, &format) == -1)
return -1;
return format.fmt.pix.width;
}
int MajorImageProcessingThread::LPF_GetCurResHeight()
{
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_G_FMT, &format) == -1)
return -1;
return format.fmt.pix.height;
}
int MajorImageProcessingThread::init_format()
{
//set fmt
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
format.fmt.pix.width = IMAGEWIDTH;
format.fmt.pix.height = IMAGEHEIGHT;
format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; // or V4L2_PIX_FMT_YUYV
format.fmt.pix.field = V4L2_FIELD_NONE;
if (0 > ioctl(fd, VIDIOC_S_FMT, &format)) {
qDebug("set format failed\n");
return -1;
}
/* verify the driver really accepted MJPEG; if not, the device does not support it */
if (V4L2_PIX_FMT_MJPEG != format.fmt.pix.pixelformat) {
qDebug("Error: the device does not support MJPEG format!\n");
return -1;
}
/* query stream parameters */
streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (0 > ioctl(fd, VIDIOC_G_PARM, &streamparm)) {
qDebug("get parm failed\n");
return -1;
}
WIDTH = LPF_GetCurResWidth();
HEIGHT = LPF_GetCurResHeight();
LPF_GetResolutinCount(); //review here
return 0;
}
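init_format() simply gives up when the driver rejects MJPEG. If a camera only speaks YUYV, a variant could fall back and route frames through convert_yuv_to_rgb_buffer instead; this is a sketch of that idea, not what my final code does:

// hypothetical fallback: try MJPEG first, then YUYV
format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
if (ioctl(fd, VIDIOC_S_FMT, &format) < 0 ||
    format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    if (ioctl(fd, VIDIOC_S_FMT, &format) < 0 ||
        format.fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV) {
        qDebug("neither MJPEG nor YUYV is supported\n");
        return -1;
    }
    // frames must now go through convert_yuv_to_rgb_buffer()
}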
int MajorImageProcessingThread::init_formatNew(){
// make sure this is a video capture device
if (!(V4L2_CAP_VIDEO_CAPTURE & cap.capabilities)) {
printf("Error:No capture video device!\n");
return -1;
}
printf("Device Information:\n");
printf("driver name: %s\ncard name: %s\n",cap.driver,cap.card);
printf("------------------------------------\n");
/* enumerate every pixel format the camera supports */
struct v4l2_fmtdesc fmtdesc;
fmtdesc.index = 0;
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
printf("Support Format:\n");
/* a camera pixel format and its description */
typedef struct camera_format {
unsigned char description[32]; //human-readable description
unsigned int pixelformat; //fourcc pixel format
} cam_fmt;
} cam_fmt;
cam_fmt cam_fmts[10];
/* zero the array first */
memset(&cam_fmts,0,sizeof(cam_fmts));
while (0 == ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) {
// stash each enumerated format and its description in the array
cam_fmts[fmtdesc.index].pixelformat = fmtdesc.pixelformat;
memcpy(cam_fmts[fmtdesc.index].description, fmtdesc.description,sizeof(fmtdesc.description));
fmtdesc.index++;
}
/* print the formats */
struct v4l2_frmsizeenum frmsize;
struct v4l2_frmivalenum frmival;
int i;
frmsize.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
frmival.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
for(i = 0; cam_fmts[i].pixelformat;i++){
printf("format<0x%x>, description<%s>\n", cam_fmts[i].pixelformat, cam_fmts[i].description);
/* enumerate every capture resolution for this format */
frmsize.index = 0;
frmsize.pixel_format = cam_fmts[i].pixelformat;
frmival.pixel_format = cam_fmts[i].pixelformat;
while (0 == ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize)) {
printf("size<%d*%d> ",frmsize.discrete.width,frmsize.discrete.height);
frmsize.index++;
/* query the capture frame rates for this resolution */
frmival.index = 0;
frmival.width = frmsize.discrete.width;
frmival.height = frmsize.discrete.height;
while (0 == ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmival)) {
printf("<%dfps>", frmival.discrete.denominator / frmival.discrete.numerator);
frmival.index++;
}
printf("\n");
}
printf("\n");
}
printf("-------------------------------------\n");
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
format.fmt.pix.width = IMAGEWIDTH;
format.fmt.pix.height = IMAGEHEIGHT;
format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; //pick MJPEG
if (0 > ioctl(fd, VIDIOC_S_FMT, &format)) {
printf("set format failed\n");
return -1;
}
printf("set format success\n");
/* verify the driver really accepted MJPEG; if not, the device does not support it */
if (V4L2_PIX_FMT_MJPEG != format.fmt.pix.pixelformat) {
printf("Error: the device does not support MJPEG format!\n");
return -1;
}
/* print the frame size actually in effect */
printf("current frame size <%d * %d>, colorspace: %d\n", format.fmt.pix.width, format.fmt.pix.height,format.fmt.pix.colorspace);
/* query stream parameters */
streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (0 > ioctl(fd, VIDIOC_G_PARM, &streamparm)) {
printf("get parm failed\n");
return -1;
}
/* set the frame rate if the driver supports it */
if (V4L2_CAP_TIMEPERFRAME & streamparm.parm.capture.capability) {
streamparm.parm.capture.timeperframe.numerator = 1;
streamparm.parm.capture.timeperframe.denominator = 30;//30fps
if (0 > ioctl(fd, VIDIOC_S_PARM, &streamparm)) {
printf("Error:device do not support set fps\n");
return -1;
}
}
return 0;
}
int MajorImageProcessingThread::v4l2_init_buffer(){
struct v4l2_requestbuffers reqbuf;
reqbuf.count = FRAMEBUFFER_COUNT; //number of frame buffers
reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
reqbuf.memory = V4L2_MEMORY_MMAP;
if (0 > ioctl(fd, VIDIOC_REQBUFS, &reqbuf)) {
printf("request buffer failed\n");
return -1;
}
printf("request buffer success\n");
/* build the memory mapping */
struct v4l2_buffer buf;
unsigned int n_buffers = 0;
/* allocate the bookkeeping array for the mapped buffers */
// buffers = (struct buffer*)calloc(FRAMEBUFFER_COUNT, sizeof(*buffers));
buffers = static_cast<struct buffer*>(malloc(sizeof (*buffers)*reqbuf.count));//release
for (n_buffers = 0; n_buffers < FRAMEBUFFER_COUNT; n_buffers++) {
memset(&buf,0,sizeof(buf));
buf.index = n_buffers;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if(0 > ioctl(fd, VIDIOC_QUERYBUF, &buf)){
printf("VIDIOC_QUERYBUF failed\n");
return -1;
};
buffers[n_buffers].start = mmap(NULL, buf.length,PROT_READ | PROT_WRITE, MAP_SHARED,fd, buf.m.offset);
buffers[n_buffers].length = buf.length;
if (MAP_FAILED == buffers[n_buffers].start) {
printf("mmap error\n");
return -1;
}
}
printf("memory map success\n");
/* queue the buffers */
for (buf.index = 0; buf.index < FRAMEBUFFER_COUNT; buf.index++) {
if (0 > ioctl(fd, VIDIOC_QBUF, &buf)) {
printf("入队失败\n");
return -1;
}
}
return 0;
}
void MajorImageProcessingThread::StartVideoPrePareNew()
{
init_formatNew();
v4l2_init_buffer();
WIDTH = LPF_GetCurResWidth();
HEIGHT = LPF_GetCurResHeight();
rgb24 = (unsigned char*)malloc(WIDTH*HEIGHT*3*sizeof(char));
// qDebug()<<"rgb24"<
cam_recv header file:
void ReceiveMajorImageL(QImage image, int result);
void ReceiveMajorImageR(QImage image, int result);
cam_recv source file:
//the relevant part
if(video_camera_left->deviceIsOpen || video_camera_right->deviceIsOpen){
connect(video_camera_left, SIGNAL(SendMajorImageProcessing(QImage, int)),
this, SLOT(ReceiveMajorImageL(QImage, int)));
///
void cam_recv::ReceiveMajorImageL(QImage image, int result)
{
//close the video after a timeout
//a timeout means VIDIOC_DQBUF would block, so just shut the video down
if(result == -1)
{
video_camera_left->stop();
video_camera_left->wait();
video_camera_left->LPF_StopRun();
ui->cameraLeft->clear();
ui->cameraLeft->setText("获取设备图像超时!");
}
if(!image.isNull())
{
ui->cameraLeft->clear();
switch(result)
{
case 0: //Success
err11 = err19 = 0;
if(image.isNull()){
ui->cameraLeft->setText("画面丢失!");
}else{
// ui->cameraRight->setPixmap(image);
ui->cameraLeft->setPixmap(QPixmap::fromImage(image.scaled(ui->cameraLeft->size())));
}
break;
case 11: //Resource temporarily unavailable
err11++;
if(err11 == 10)
{
ui->cameraLeft->clear();
ui->cameraLeft->setText("设备已打开,但获取视频失败!\n请尝试切换USB端口后断开重试!");
}
break;
case 19: //No such device
err19++;
if(err19 == 10)
{
ui->cameraLeft->clear();
ui->cameraLeft->setText("设备丢失!");
}
break;
}
}
}
void cam_recv::ReceiveMajorImageR(QImage image, int result)
{
//close the video after a timeout
//a timeout means VIDIOC_DQBUF would block, so just shut the video down
if(result == -1)
{
video_camera_right->stop();
video_camera_right->wait();
video_camera_right->LPF_StopRun();
ui->cameraRight->clear();
ui->cameraRight->setText("获取设备图像超时!");
}
if(!image.isNull())
{
ui->cameraRight->clear();
switch(result)
{
case 0: //Success
err11 = err19 = 0;
if(image.isNull())
ui->cameraRight->setText("画面丢失!");
else{
// ui->cameraRight->setPixmap(image);
ui->cameraRight->setPixmap(QPixmap::fromImage(image.scaled(ui->cameraRight->size())));
}
break;
case 11: //Resource temporarily unavailable
err11++;
if(err11 == 10)
{
ui->cameraRight->clear();
ui->cameraRight->setText("设备已打开,但获取视频失败!\n请尝试切换USB端口后断开重试!");
}
break;
case 19: //No such device
err19++;
if(err19 == 10)
{
ui->cameraRight->clear();
ui->cameraRight->setText("设备丢失!");
}
break;
}
}
}
Yet the video stream still came out corrupted. Strangely, after opening the camera once with the system's built-in camera app, the program would run fine again.
After a long debugging session, checking the descriptors one by one and finding the whole ioctl sequence correct, the cause of the corruption finally turned up:
convert_yuv_to_rgb_buffer was mis-parsing the data stream (it expects packed YUYV, but the device delivers MJPEG). The fix is to copy the raw frame bytes instead:
//copy the bytes actually used out of the driver buffer into framebuf
memcpy(framebuf->buf,(char *)buffers[buffer.index].start,buffer.bytesused);//bytesused = bytes of the buffer actually filled
framebuf->length = buffer.bytesused;
Loading that into a QPixmap and converting to QImage is then enough:
FrameBuffer frame;
QImage img;
QPixmap pix;
int ret = LPF_GetFrame(&frame);
if(ret == 0)
{
int WV = LPF_GetCurResWidth();
int HV = LPF_GetCurResHeight();
pix.loadFromData(frame.buf, frame.length);
img= pix.toImage();
}
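One caveat: QPixmap::loadFromData hands the JPEG to Qt's imageformats plugin, so that plugin must be deployed on the target. An equivalent decode that stays inside OpenCV 4 would look like this (a sketch built on the same FrameBuffer, with opencv2/imgcodecs.hpp and opencv2/imgproc.hpp included):

// alternative to QPixmap::loadFromData, using the OpenCV 4 C++ API
cv::Mat raw(1, frame.length, CV_8UC1, frame.buf);
cv::Mat bgr = cv::imdecode(raw, cv::IMREAD_COLOR);
if (!bgr.empty()) {
    cv::cvtColor(bgr, bgr, cv::COLOR_BGR2RGB);
    img = QImage(bgr.data, bgr.cols, bgr.rows, (int)bgr.step,
                 QImage::Format_RGB888).copy();
}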