There are plenty of references on setting up the environment, so I won't repeat that here. These notes record some of the relevant APIs.
Table of Contents
0. Check camera information
1. Set the resolution
2. Get the camera depth scale
3. Align the color and depth images
4. Set the SR300 near/far range preset
5. Get the camera intrinsics
Test program
0. Check camera information
~$ rs-sensor-control
Run the tool and follow the prompts it prints.
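The same information can also be queried from code. Below is a minimal sketch of my own (not from the original notes) that uses rs2::context to list the connected devices and their sensors:

#include <librealsense2/rs.hpp>
#include <iostream>

int main()
{
    rs2::context ctx;
    for (auto&& dev : ctx.query_devices())                 // every connected RealSense device
    {
        std::cout << "Device: " << dev.get_info(RS2_CAMERA_INFO_NAME)
                  << "  S/N: "  << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::endl;
        for (auto&& s : dev.query_sensors())               // sensors on this device
            std::cout << "  Sensor: " << s.get_info(RS2_CAMERA_INFO_NAME) << std::endl;
    }
    return 0;
}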
1. Set the resolution
The color camera supports quite a few resolutions, but the depth camera tops out at 640*480. Through the align operation the depth image can be upsampled automatically to other resolutions (e.g. 1080p); the API takes care of this.
rs2::config cfg;
//cfg.enable_stream(RS2_STREAM_COLOR, 1920, 1080, RS2_FORMAT_BGR8, 10);
cfg.enable_stream(RS2_STREAM_COLOR, 1280, 720, RS2_FORMAT_BGR8, 30);
cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
Tip: when streaming at 1080p you will often hit a "Frame didn't arrive within 5000" timeout error, caused by the latency of pushing that much data over the bus. Two workarounds that have worked well:
① Use a USB 3.0 port.
② Lower the frame rate to 10 fps (see the sketch after this list).
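As a concrete version of tip ②, here is a hedged sketch of a 1080p configuration with the color stream dropped to 10 fps, mirroring the commented-out line above; rs2::pipeline::wait_for_frames() also accepts an explicit timeout in milliseconds if you need more headroom (the 10000 ms value below is my own choice):

rs2::config low_fps_cfg;                                                      // variable names are my own
low_fps_cfg.enable_stream(RS2_STREAM_COLOR, 1920, 1080, RS2_FORMAT_BGR8, 10); // tip ②: 10 fps color
low_fps_cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
rs2::pipeline pipe;
pipe.start(low_fps_cfg);
rs2::frameset frames = pipe.wait_for_frames(10000);                           // wait up to 10 s for a frame set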
2. Get the camera depth scale
auto sensor = selection.get_device().first<rs2::depth_sensor>(); // depth sensor from the started pipeline profile
auto scale = sensor.get_depth_scale();
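The depth scale converts the raw 16-bit (Z16) depth values into meters. A minimal sketch of using it (my own example; frames, x and y are placeholders):

rs2::depth_frame depth = frames.get_depth_frame();
uint16_t raw = reinterpret_cast<const uint16_t*>(depth.get_data())[y * depth.get_width() + x];
float meters = raw * scale;                        // raw Z16 value times the depth scale
// rs2::depth_frame::get_distance(x, y) performs the same conversion internally
std::cout << "depth at (" << x << ", " << y << "): " << meters << " m" << std::endl;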
3. Align the color and depth images
By default the captured color and depth images are not aligned, so there is some offset between them. The API can return aligned frames directly:
rs2::frameset data = pipe.wait_for_frames(); // Wait for next set of frames from the camera
rs2::align align(rs2_stream::RS2_STREAM_COLOR); // align depth and rgb image
rs2::frameset aligned_frm = align.process(data); // aligned depth to rgb
rs2::frame color = aligned_frm.get_color_frame();
rs2::frame depth = aligned_frm.get_depth_frame();
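After alignment, every color pixel has a depth pixel underneath it, so you can read the metric distance at any color coordinate. A small sketch (x and y are placeholder pixel coordinates):

rs2::depth_frame aligned_depth = aligned_frm.get_depth_frame();
float dist = aligned_depth.get_distance(x, y);     // distance in meters at color pixel (x, y)
std::cout << "distance at (" << x << ", " << y << "): " << dist << " m" << std::endl;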
4. Set the SR300 near/far range preset
sensor.set_option(RS2_OPTION_VISUAL_PRESET, 0); // 0 (default) = short range, 1 = long range; see the SDK for the other presets
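Not every device exposes this option, so it is safer to check before writing it. A hedged sketch using the generic option API:

if (sensor.supports(RS2_OPTION_VISUAL_PRESET))
{
    rs2::option_range r = sensor.get_option_range(RS2_OPTION_VISUAL_PRESET);
    std::cout << "visual preset range: " << r.min << " - " << r.max << std::endl;
    sensor.set_option(RS2_OPTION_VISUAL_PRESET, 1.f);   // e.g. switch to the long-range preset
}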
5. Get the camera intrinsics
rs2::frame color = aligned_frm.get_color_frame();
rs2::stream_profile cprofile = color.get_profile();
rs2::video_stream_profile cvsprofile(cprofile);
rs2_intrinsics color_intrin = cvsprofile.get_intrinsics();
std::cout << "\ncolor intrinsics: ";
std::cout << color_intrin.width << " x " << color_intrin.height
          << "  ppx: " << color_intrin.ppx << "  ppy: " << color_intrin.ppy
          << "  fx: " << color_intrin.fx << "  fy: " << color_intrin.fy << std::endl;
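With the intrinsics and an aligned depth value you can back-project a pixel into a 3D point. Below is a sketch using rs2_deproject_pixel_to_point from librealsense2/rsutil.h; the pixel (320, 240) is just an arbitrary example:

#include <librealsense2/rsutil.h>                   // rs2_deproject_pixel_to_point

rs2::depth_frame dframe = aligned_frm.get_depth_frame();
float pixel[2] = { 320.f, 240.f };                  // example pixel (u, v) in the color image
float point[3];                                     // output: X, Y, Z in meters
rs2_deproject_pixel_to_point(point, &color_intrin, pixel, dframe.get_distance(320, 240));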
OK, more to be added later.
Below is a test program that can be run as-is.
Environment: librealsense, OpenCV (any version)
// License: Apache 2.0. See LICENSE file in root directory.
// Copyright(c) 2017 Intel Corporation. All Rights Reserved.
#include <librealsense2/rs.hpp> // Include RealSense Cross Platform API
#include <opencv2/opencv.hpp>   // Include OpenCV API
#include <iostream>
#include <string>
using namespace std;
int main(int argc, char * argv[]) try
{
unsigned int count = 0;
unsigned int flag =0;
// Declare depth colorizer for pretty visualization of depth data
rs2::colorizer color_map;
// Declare RealSense pipeline, encapsulating the actual device and sensors
rs2::pipeline pipe;
// Start streaming with default recommended configuration
rs2::config cfg;
//cfg.enable_stream(RS2_STREAM_COLOR, 1920, 1080, RS2_FORMAT_BGR8, 30);
cfg.enable_stream(RS2_STREAM_COLOR, 1280, 720, RS2_FORMAT_BGR8, 30);
cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
rs2::pipeline_profile selection = pipe.start(cfg);
// Find the depth sensor (devices can have zero or more than one)
auto sensor = selection.get_device().first<rs2::depth_sensor>();
//sensor.set_option(RS2_OPTION_DEPTH_UNITS, 0.01f);
auto scale = sensor.get_depth_scale();
cout << "scale: " << scale << endl;
using namespace cv;
const auto window_name = "Display Image";
namedWindow(window_name, WINDOW_AUTOSIZE);
while (getWindowProperty(window_name, WND_PROP_AUTOSIZE) >= 0)
{
rs2::frameset data = pipe.wait_for_frames(); // Wait for next set of frames from the camera
rs2::align align(rs2_stream::RS2_STREAM_COLOR); // align depth and rgb image
rs2::frameset aligned_frm = align.process(data); // aligned depth to rgb
rs2::frame color = aligned_frm.get_color_frame();
rs2::frame depth = aligned_frm.get_depth_frame();
// Query frame size (width and height) from the color frame
const int w = color.as<rs2::video_frame>().get_width();
const int h = color.as<rs2::video_frame>().get_height();
// Wrap the color frame (BGR8) and the aligned depth frame (Z16) in OpenCV matrices
Mat image(Size(w, h), CV_8UC3, (void*)color.get_data(), Mat::AUTO_STEP);
Mat imageD(Size(w, h), CV_16UC1, (void*)depth.get_data(), Mat::AUTO_STEP);
// Update the window with new data
imshow(window_name, image);
imshow("1", imageD);
if(cv::waitKey(30) == 's'){
/*string RGBname = "data//img"+to_string(count)+".png";
string depthName = "data//depth"+to_string(count++)+".png";
imwrite(RGBname,image);
imwrite(depthName,imageD);*/
imwrite("color.jpg",image);
imwrite("depth.png",imageD);
}
flag++;
}
return EXIT_SUCCESS;
}
catch (const rs2::error & e)
{
std::cerr << "RealSense error calling " << e.get_failed_function() << "(" << e.get_failed_args() << "):\n " << e.what() << std::endl;
return EXIT_FAILURE;
}
catch (const std::exception& e)
{
std::cerr << e.what() << std::endl;
return EXIT_FAILURE;
}
Original article. If you repost it, please contact the author and include a link back.