关于opencv更改摄像头参数(帧率,分辨率,曝光度……)的几个问题

1,适用于VideoCapture打开的摄像头

VideoCapture capture(0); 设置摄像头参数 不要随意修改

capture.set(CV_CAP_PROP_FRAME_WIDTH, 1080);//宽度

capture.set(CV_CAP_PROP_FRAME_HEIGHT, 960);//高度

capture.set(CV_CAP_PROP_FPS, 30);//帧率 帧/秒

capture.set(CV_CAP_PROP_BRIGHTNESS, 1);//亮度 

capture.set(CV_CAP_PROP_CONTRAST,40);//对比度 40

capture.set(CV_CAP_PROP_SATURATION, 50);//饱和度 50

capture.set(CV_CAP_PROP_HUE, 50);//色调 50

capture.set(CV_CAP_PROP_EXPOSURE, 50);//曝光 50

得到摄像头的参数

capture.get(CV_CAP_PROP_FRAME_WIDTH);

capture.get(CV_CAP_PROP_FRAME_HEIGHT);

capture.get(CV_CAP_PROP_FPS);

capture.get(CV_CAP_PROP_BRIGHTNESS);

capture.get(CV_CAP_PROP_CONTRAST);

capture.get(CV_CAP_PROP_SATURATION);

capture.get(CV_CAP_PROP_HUE);

capture.get(CV_CAP_PROP_EXPOSURE);

获取视频参数:

capture.get(CV_CAP_PROP_FRAME_COUNT);//视频帧数 

然后你会发现除了个别参数你能更改之外(如曝光度),大部分你是不能更改的,甚至都没办法得到,这种方式并不适用

2,不做开发,只是单纯的更改

那么推荐一个软件,amcap,很简单很容易上手。(原文附的百度网盘链接已失效,可自行搜索下载。)

补,现在突然想起来我的一个学长告诉我的,利用这个软件调节摄像头的曝光度,可以改变帧率,且摄像头会记住曝光度的设置(其他特性就没有这个特点)。-2019.3.12

3,修改opencv的文件,不过效果可能和第一个差不多

大概是在opencv的这个位置,找一下,modules/highgui/src/cap_v4l.cpp,里面有关于参数的设置,位置比较靠前,可以搜索,也可以直接找到

大致在200多行

4,v4l2

下面是我找到的一篇参考,可以突破帧率的限制,当然前提是摄像头支持

Jetson TX1开发笔记(六):V4L2+OpenCV3.1以MJPG格式读取USB摄像头图像并实时显示_Jack-Cui的博客-CSDN博客

目前只适用于Linux系统,本人试验过,120帧的摄像头在只打开摄像头时可以达到100帧左右,设置的图片分辨率越小,能达到的帧率越高

 
/*
 * NOTE(review): the original post lost every header name inside <> during
 * HTML extraction (21 bare "#include" lines). The list below is reconstructed
 * from what the code actually uses (open/ioctl/mmap, V4L2, clock, cout, atan,
 * OpenCV C and C++ APIs) — confirm against the original Jack-Cui post.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <getopt.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h>
#include <linux/videodev2.h>

#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"

#include <iostream>
#include <string>
#include <ctime>
#include <cmath>

using namespace std;
using namespace cv;

  24. #define CLEAR(x) memset(&(x), 0, sizeof(x))

  25. #define IMAGEWIDTH 3264

  26. #define IMAGEHEIGHT 2448

  27. #define WINDOW_NAME1 "【原始图】" //为窗口标题定义的宏

  28. #define WINDOW_NAME2 "【图像轮廓】" //为窗口标题定义的宏

  29. Mat g_srcImage; Mat g_grayImage;

  30. int g_nThresh = 90;

  31. int g_nMaxThresh = 255;

  32. RNG g_rng(12345);

  33. Mat g_cannyMat_output;

  34. vector > g_vContours;

  35. vector g_vHierarchy;

  36. Point point1[100000];

  37. Point point2[100000];

  38. Point point3[100000];

  39. int ii,iii;

  40. int flag2 = 0;//避障用

  41. float number = 0;

  42. int fps=0;

  43. class V4L2Capture {

  44. public:

  45. V4L2Capture(char *devName, int width, int height);

  46. virtual ~V4L2Capture();

  47. int openDevice();

  48. int closeDevice();

  49. int initDevice();

  50. int startCapture();

  51. int stopCapture();

  52. int freeBuffers();

  53. int getFrame(void **,size_t *);

  54. int backFrame();

  55. static void test();

  56. private:

  57. int initBuffers();

  58. struct cam_buffer

  59. {

  60. void* start;

  61. unsigned int length;

  62. };

  63. char *devName;

  64. int capW;

  65. int capH;

  66. int fd_cam;

  67. cam_buffer *buffers;

  68. unsigned int n_buffers;

  69. int frameIndex;

  70. };

  71. V4L2Capture::V4L2Capture(char *devName, int width, int height) {

  72. // TODO Auto-generated constructor stub

  73. this->devName = devName;

  74. this->fd_cam = -1;

  75. this->buffers = NULL;

  76. this->n_buffers = 0;

  77. this->frameIndex = -1;

  78. this->capW=width;

  79. this->capH=height;

  80. }

  81. V4L2Capture::~V4L2Capture() {

  82. // TODO Auto-generated destructor stub

  83. }

  84. int V4L2Capture::openDevice() {

  85. /*设备的打开*/

  86. printf("video dev : %s\n", devName);

  87. fd_cam = open(devName, O_RDWR);

  88. if (fd_cam < 0) {

  89. perror("Can't open video device");

  90. }

  91. return 0;

  92. }

  93. int V4L2Capture::closeDevice() {

  94. if (fd_cam > 0) {

  95. int ret = 0;

  96. if ((ret = close(fd_cam)) < 0) {

  97. perror("Can't close video device");

  98. }

  99. return 0;

  100. } else {

  101. return -1;

  102. }

  103. }

  104. int V4L2Capture::initDevice() {

  105. int ret;

  106. struct v4l2_capability cam_cap; //显示设备信息

  107. struct v4l2_cropcap cam_cropcap; //设置摄像头的捕捉能力

  108. struct v4l2_fmtdesc cam_fmtdesc; //查询所有支持的格式:VIDIOC_ENUM_FMT

  109. struct v4l2_crop cam_crop; //图像的缩放

  110. struct v4l2_format cam_format; //设置摄像头的视频制式、帧格式等

  111. /* 使用IOCTL命令VIDIOC_QUERYCAP,获取摄像头的基本信息*/

  112. ret = ioctl(fd_cam, VIDIOC_QUERYCAP, &cam_cap);

  113. if (ret < 0) {

  114. perror("Can't get device information: VIDIOCGCAP");

  115. }

  116. printf(

  117. "Driver Name:%s\nCard Name:%s\nBus info:%s\nDriver Version:%u.%u.%u\n",

  118. cam_cap.driver, cam_cap.card, cam_cap.bus_info,

  119. (cam_cap.version >> 16) & 0XFF, (cam_cap.version >> 8) & 0XFF,

  120. cam_cap.version & 0XFF);

  121. /* 使用IOCTL命令VIDIOC_ENUM_FMT,获取摄像头所有支持的格式*/

  122. cam_fmtdesc.index = 0;

  123. cam_fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  124. printf("Support format:\n");

  125. while (ioctl(fd_cam, VIDIOC_ENUM_FMT, &cam_fmtdesc) != -1) {

  126. printf("\t%d.%s\n", cam_fmtdesc.index + 1, cam_fmtdesc.description);

  127. cam_fmtdesc.index++;

  128. }

  129. /* 使用IOCTL命令VIDIOC_CROPCAP,获取摄像头的捕捉能力*/

  130. cam_cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  131. if (0 == ioctl(fd_cam, VIDIOC_CROPCAP, &cam_cropcap)) {

  132. printf("Default rec:\n\tleft:%d\n\ttop:%d\n\twidth:%d\n\theight:%d\n",

  133. cam_cropcap.defrect.left, cam_cropcap.defrect.top,

  134. cam_cropcap.defrect.width, cam_cropcap.defrect.height);

  135. /* 使用IOCTL命令VIDIOC_S_CROP,获取摄像头的窗口取景参数*/

  136. cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  137. cam_crop.c = cam_cropcap.defrect; //默认取景窗口大小

  138. if (-1 == ioctl(fd_cam, VIDIOC_S_CROP, &cam_crop)) {

  139. //printf("Can't set crop para\n");

  140. }

  141. } else {

  142. printf("Can't set cropcap para\n");

  143. }

  144. /* 使用IOCTL命令VIDIOC_S_FMT,设置摄像头帧信息*/

  145. cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  146. cam_format.fmt.pix.width = capW;

  147. cam_format.fmt.pix.height = capH;

  148. cam_format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; //要和摄像头支持的类型对应

  149. cam_format.fmt.pix.field = V4L2_FIELD_INTERLACED;

  150. ret = ioctl(fd_cam, VIDIOC_S_FMT, &cam_format);

  151. if (ret < 0) {

  152. perror("Can't set frame information");

  153. }

  154. /* 使用IOCTL命令VIDIOC_G_FMT,获取摄像头帧信息*/

  155. cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  156. ret = ioctl(fd_cam, VIDIOC_G_FMT, &cam_format);

  157. if (ret < 0) {

  158. perror("Can't get frame information");

  159. }

  160. printf("Current data format information:\n\twidth:%d\n\theight:%d\n",

  161. cam_format.fmt.pix.width, cam_format.fmt.pix.height);

  162. ret = initBuffers();

  163. if (ret < 0) {

  164. perror("Buffers init error");

  165. //exit(-1);

  166. }

  167. return 0;

  168. }

  169. int V4L2Capture::initBuffers() {

  170. int ret;

  171. /* 使用IOCTL命令VIDIOC_REQBUFS,申请帧缓冲*/

  172. struct v4l2_requestbuffers req;

  173. CLEAR(req);

  174. req.count = 4;

  175. req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  176. req.memory = V4L2_MEMORY_MMAP;

  177. ret = ioctl(fd_cam, VIDIOC_REQBUFS, &req);

  178. if (ret < 0) {

  179. perror("Request frame buffers failed");

  180. }

  181. if (req.count < 2) {

  182. perror("Request frame buffers while insufficient buffer memory");

  183. }

  184. buffers = (struct cam_buffer*) calloc(req.count, sizeof(*buffers));

  185. if (!buffers) {

  186. perror("Out of memory");

  187. }

  188. for (n_buffers = 0; n_buffers < req.count; n_buffers++) {

  189. struct v4l2_buffer buf;

  190. CLEAR(buf);

  191. // 查询序号为n_buffers 的缓冲区,得到其起始物理地址和大小

  192. buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  193. buf.memory = V4L2_MEMORY_MMAP;

  194. buf.index = n_buffers;

  195. ret = ioctl(fd_cam, VIDIOC_QUERYBUF, &buf);

  196. if (ret < 0) {

  197. printf("VIDIOC_QUERYBUF %d failed\n", n_buffers);

  198. return -1;

  199. }

  200. buffers[n_buffers].length = buf.length;

  201. //printf("buf.length= %d\n",buf.length);

  202. // 映射内存

  203. buffers[n_buffers].start = mmap(

  204. NULL, // start anywhere

  205. buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd_cam,

  206. buf.m.offset);

  207. if (MAP_FAILED == buffers[n_buffers].start) {

  208. printf("mmap buffer%d failed\n", n_buffers);

  209. return -1;

  210. }

  211. }

  212. return 0;

  213. }

  214. int V4L2Capture::startCapture() {

  215. unsigned int i;

  216. for (i = 0; i < n_buffers; i++) {

  217. struct v4l2_buffer buf;

  218. CLEAR(buf);

  219. buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  220. buf.memory = V4L2_MEMORY_MMAP;

  221. buf.index = i;

  222. if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &buf)) {

  223. printf("VIDIOC_QBUF buffer%d failed\n", i);

  224. return -1;

  225. }

  226. }

  227. enum v4l2_buf_type type;

  228. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  229. if (-1 == ioctl(fd_cam, VIDIOC_STREAMON, &type)) {

  230. printf("VIDIOC_STREAMON error");

  231. return -1;

  232. }

  233. return 0;

  234. }

  235. int V4L2Capture::stopCapture() {

  236. enum v4l2_buf_type type;

  237. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  238. if (-1 == ioctl(fd_cam, VIDIOC_STREAMOFF, &type)) {

  239. printf("VIDIOC_STREAMOFF error\n");

  240. return -1;

  241. }

  242. return 0;

  243. }/*ok*/

  244. int V4L2Capture::freeBuffers() {

  245. unsigned int i;

  246. for (i = 0; i < n_buffers; ++i) {

  247. if (-1 == munmap(buffers[i].start, buffers[i].length)) {

  248. printf("munmap buffer%d failed\n", i);

  249. return -1;

  250. }

  251. }

  252. free(buffers);

  253. return 0;

  254. }

  255. int V4L2Capture::getFrame(void **frame_buf, size_t* len) {

  256. struct v4l2_buffer queue_buf;

  257. CLEAR(queue_buf);

  258. queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  259. queue_buf.memory = V4L2_MEMORY_MMAP;

  260. if (-1 == ioctl(fd_cam, VIDIOC_DQBUF, &queue_buf)) {

  261. printf("VIDIOC_DQBUF error\n");

  262. return -1;

  263. }

  264. *frame_buf = buffers[queue_buf.index].start;

  265. *len = buffers[queue_buf.index].length;

  266. frameIndex = queue_buf.index;

  267. return 0;

  268. }

  269. int V4L2Capture::backFrame() {

  270. if (frameIndex != -1) {

  271. struct v4l2_buffer queue_buf;

  272. CLEAR(queue_buf);

  273. queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  274. queue_buf.memory = V4L2_MEMORY_MMAP;

  275. queue_buf.index = frameIndex;

  276. if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &queue_buf)) {

  277. printf("VIDIOC_QBUF error\n");

  278. return -1;

  279. }

  280. return 0;

  281. }

  282. return -1;

  283. }

  284. void V4L2Capture::test() {

  285. unsigned char *yuv422frame = NULL;

  286. unsigned long yuvframeSize = 0;

  287. string videoDev="/dev/video0";

  288. V4L2Capture *vcap = new V4L2Capture(const_cast(videoDev.c_str()),

  289. 1920, 1080);

  290. vcap->openDevice();

  291. vcap->initDevice();

  292. vcap->startCapture();

  293. vcap->getFrame((void **) &yuv422frame, (size_t *)&yuvframeSize);

  294. vcap->backFrame();

  295. vcap->freeBuffers();

  296. vcap->closeDevice();

  297. }

  298. void line2(Point point3[100000], int n)

  299. {

  300. float aa, bb, cc, dd, ee, ff, gg;

  301. int jj = 0;

  302. for (;jj

  303. {

  304. aa += point3[jj].x*point3[jj].x;

  305. bb += point3[jj].x;

  306. cc += point3[jj].x*point3[jj].y;

  307. dd += point3[jj].y;

  308. }

  309. ee = aa*n - bb*bb;

  310. if ((int)(ee* 100) != 0)

  311. {

  312. ff = (n*cc - bb*dd) / ee;

  313. gg = (dd - bb*ff) / n;

  314. }

  315. else {

  316. ff = 0;

  317. gg = 1;

  318. }

  319. Point point0, pointn;

  320. point0.y = 0;

  321. point0.x = gg;

  322. pointn.y = (n-1);

  323. pointn.x = ((n-1) * ff + gg);

  324. Mat draw_ing2 = Mat::zeros(g_cannyMat_output.size(), CV_8UC3);

  325. line(draw_ing2, point0, pointn, (255, 255, 255));

  326. imshow("10", draw_ing2);

  327. //cout << "\n"<

  328. float the =180*atan(ff)/3.14159;

  329. float dis = ff * 160+gg - 160;

  330. cout << the << " " << dis << endl;

  331. //正中心ff=0,gg=160,逆时ff为正,顺时ff为负

  332. }

  333. void findcolor(cv::Mat &image)

  334. {

  335. cv::Mat_::iterator it = image.begin();

  336. cv::Mat_::iterator itend = image.end();

  337. ii = 0;

  338. iii = 0;

  339. int flagg = 0;

  340. cv::Mat srcX(image.rows, image.cols , CV_32F);

  341. cv::Mat srcY(image.rows, image.cols, CV_32F);

  342. for (int i = 0;i < image.rows;i++)

  343. {

  344. for (int j = 0;j < image.cols;j++)

  345. {

  346. if (flagg == 0)/*这样遍历水平方向无法得到有效数据*/

  347. {

  348. if ((*it)[0] == 255 && (*it)[1] == 0 && (*it)[2] == 255)

  349. {

  350. flagg = 1;

  351. point1[ii].x = i;

  352. point1[ii].y = j;

  353. ii++;

  354. }

  355. }

  356. else

  357. {

  358. if ((*it)[0] == 255 && (*it)[1] == 0 && (*it)[2] == 255)

  359. {

  360. flagg = 0;

  361. point2[iii].x = i;

  362. point2[iii].y = j;

  363. iii++;

  364. }

  365. }

  366. if (it == itend)

  367. break;

  368. else it++;

  369. }

  370. }

  371. IplImage pImg = IplImage(image);

  372. CvArr* arr = (CvArr*)&pImg;

  373. int nn = ii;

  374. for (;ii > 0;ii--)

  375. {

  376. point3[ii].x = (point1[ii].x + point2[ii].x) / 2;

  377. point3[ii].y = (point1[ii].y + point2[ii].y) / 2;

  378. //circle(image, point3[ii], 1, (255, 255, 255));

  379. cvSet2D(arr, point3[ii].x, point3[ii].y, Scalar(255, 255, 255));

  380. }

  381. line2(point3, nn);

  382. }

  383. void on_ThreshChange(int, void* )

  384. {

  385. // 使用Canndy检测边缘

  386. Canny( g_grayImage, g_cannyMat_output, g_nThresh, g_nThresh*2, 3 );

  387. // 找到轮廓

  388. findContours( g_cannyMat_output, g_vContours, g_vHierarchy, RETR_TREE, CHAIN_APPROX_SIMPLE, Point(0, 0) );

  389. // 计算矩

  390. vector mu(g_vContours.size() );

  391. for(unsigned int i = 0; i < g_vContours.size(); i++ )

  392. { mu[i] = moments( g_vContours[i], false ); }

  393. // 计算中心矩

  394. vector mc( g_vContours.size() );

  395. for( unsigned int i = 0; i < g_vContours.size(); i++ )

  396. { mc[i] = Point2f( static_cast(mu[i].m10/mu[i].m00), static_cast(mu[i].m01/mu[i].m00 )); }

  397. // 绘制轮廓

  398. Mat drawing = Mat::zeros(g_cannyMat_output.size(), CV_8UC3);

  399. for( unsigned int i = 0; i< g_vContours.size(); i++ )

  400. {

  401. //Scalar color = Scalar( g_rng.uniform(0, 255), g_rng.uniform(0,255), g_rng.uniform(0,255) );//随机生成颜色值

  402. Scalar color = Scalar(255, 0, 255);

  403. drawContours( drawing, g_vContours, i, color, 2, 8, g_vHierarchy, 0, Point() );//绘制外层和内层轮廓

  404. circle( drawing, mc[i], 4, color, -1, 8, 0 );;//绘制圆

  405. }

  406. findcolor(drawing);

  407. //line1(point1,point2,ii,iii);

  408. // 显示到窗口中

  409. // namedWindow( WINDOW_NAME2, WINDOW_AUTOSIZE );

  410. imshow( WINDOW_NAME2, drawing );

  411. }

  412. void findline(Mat image)

  413. {

  414. cv::Mat_::iterator it = image.begin();

  415. cv::Mat_::iterator itend = image.end();

  416. for (;it != itend;it++)

  417. {

  418. if ((*it)[1] == 0 && (*it)[2] >= 100)//条件可能需要改变

  419. {

  420. if(flag2==0)

  421. {

  422. flag2 = 1;

  423. cout << "注意line1,避障"<

  424. //向主控发送消息

  425. }

  426. else

  427. {

  428. cout << "注意line2,避障" << endl;

  429. //向主控发送消息

  430. //避障一与避障二中间要隔一段时间

  431. }

  432. }

  433. }

  434. }

  435. void wave(const cv::Mat &image, cv::Mat &result)

  436. {

  437. cv::Mat srcX(image.rows / 2, image.cols / 2, CV_32F);

  438. cv::Mat srcY(image.rows / 2, image.cols / 2, CV_32F);

  439. for (int i = 0;i

  440. for (int j = 0;j < image.cols /2;j++)

  441. {

  442. srcX.at(i, j) = 2 * j;

  443. srcY.at(i, j) = 2 * i;

  444. }

  445. cv::remap(image, result, srcX, srcY, cv::INTER_LINEAR);

  446. }

  447. void VideoPlayer() {

  448. unsigned char *yuv422frame = NULL;

  449. unsigned long yuvframeSize = 0;

  450. string videoDev = "/dev/video0";

  451. V4L2Capture *vcap = new V4L2Capture(const_cast(videoDev.c_str()), 640, 480);

  452. vcap->openDevice();

  453. vcap->initDevice();

  454. vcap->startCapture();

  455. cvNamedWindow("Capture",CV_WINDOW_AUTOSIZE);

  456. IplImage* img;

  457. CvMat cvmat;

  458. double t;

  459. clock_t start, end;

  460. double number=0;

  461. int fps=0;

  462. while(1){

  463. start=clock();

  464. t = (double)cvGetTickCount();

  465. vcap->getFrame((void **) &yuv422frame, (size_t *)&yuvframeSize);

  466. cvmat = cvMat(IMAGEHEIGHT,IMAGEWIDTH,CV_8UC3,(void*)yuv422frame); //CV_8UC3

  467. //解码

  468. img = cvDecodeImage(&cvmat,1);

  469. if(!img){

  470. printf("DecodeImage error!\n");

  471. }

  472. cv::Mat g_srcImage = cv::cvarrToMat(img,true);

  473. cvShowImage("Capture",img);

  474. cvReleaseImage(&img);

  475. vcap->backFrame();

  476. if((cvWaitKey(1)&255) == 27){

  477. exit(0);

  478. }

  479. wave(g_srcImage, g_srcImage);

  480. findline(g_srcImage);

  481. // 把原图像转化成灰度图像并进行平滑

  482. cvtColor(g_srcImage, g_grayImage, COLOR_BGR2GRAY);

  483. blur(g_grayImage, g_grayImage, Size(3, 3));

  484. //创建滚动条并进行初始化

  485. createTrackbar(" 阈值", WINDOW_NAME1, &g_nThresh, g_nMaxThresh, on_ThreshChange);

  486. on_ThreshChange(0, 0);

  487. t = (double)cvGetTickCount() - t;

  488. printf("Used time is %g ms\n", (t / (cvGetTickFrequency() * 1000)));

  489. end =clock();

  490. number=number+end-start;

  491. fps++;

  492. if (number/ CLOCKS_PER_SEC>= 0.25)//windows10 for CLK_TCK

  493. {

  494. cout<

  495. fps = 0;

  496. number = 0;

  497. }

  498. }

  499. vcap->stopCapture();

  500. vcap->freeBuffers();

  501. vcap->closeDevice();

  502. }

  503. int main() {

  504. VideoPlayer();

  505. return 0;

  506. }

你可能感兴趣的:(opencv,opencv)