所有你想要的都在这儿了:
opengl 显示深度图,彩色图;播放视频;显示点云视频;还有等等技术
且让我一一道来:
准备工作:~如果你只想要一个opengl显示视频的demo,ok,在后面呢,不急哈;
1,环境搭建:opengl 我用的是glew + freeglut; 深度相机我用的是PicoZense DCAM710;
2, 好了可以愉快的建立工程了,神秘微笑,包含目录,链接库啥的,嘿嘿~
3, 嗯,我们需要一个shader.h来辅助加载和编译opengl shaders,~这个不是我的原创,类似头文件很普遍;
#ifndef _SHADER_H_
#define _SHADER_H_
#include <GL/glew.h>
#include <iterator> // std::istreambuf_iterator
#include <string>
#include <fstream>
#include <iostream>
#include <vector>
struct ShaderFile
{
GLenum shaderType;
const char* filePath;
ShaderFile(GLenum type, const char* path)
:shaderType(type), filePath(path){}
};
class Shader
{
public:
Shader(const char* vertexPath, const char* fragPath) :programId(0)
{
std::vector fileVec;
fileVec.push_back(ShaderFile(GL_VERTEX_SHADER, vertexPath));
fileVec.push_back(ShaderFile(GL_FRAGMENT_SHADER, fragPath));
loadFromFile(fileVec);
}
Shader(const char* vertexPath, const char* fragPath, const char* geometryPath) :programId(0)
{
std::vector fileVec;
fileVec.push_back(ShaderFile(GL_VERTEX_SHADER, vertexPath));
fileVec.push_back(ShaderFile(GL_FRAGMENT_SHADER, fragPath));
fileVec.push_back(ShaderFile(GL_GEOMETRY_SHADER, geometryPath));
loadFromFile(fileVec);
}
void use() const
{
glUseProgram(this->programId);
}
~Shader()
{
if (this->programId)
{
glDeleteProgram(this->programId);
}
}
public:
GLuint programId;
private:
/*
*从文件加载顶点和片元着色器
*传递参数为[(着色器文件类型,着色器文件路径)]
*/
void loadFromFile(std::vector& shaderFileVec)
{
std::vector shaderObjectIdVec;
std::string vertexSource, fragSource;
std::vector sourceVec;
size_t shaderCount = shaderFileVec.size();
//读取文件源代码
for (size_t i = 0; i < shaderCount; ++i)
{
std::string shaderSource;
if (!loadShaderSource(shaderFileVec[i].filePath, shaderSource))
{
std::cout << "Error::Shader could not load file:" << shaderFileVec[i].filePath << std::endl;
return;
}
sourceVec.push_back(shaderSource);
}
bool bSuccess = true;
//编译 Shader object
for (size_t i = 0; i < shaderCount; ++i)
{
GLuint shaderId = glCreateShader(shaderFileVec[i].shaderType);
const char *c_str = sourceVec[i].c_str();
glShaderSource(shaderId, 1, &c_str, NULL);
glCompileShader(shaderId);
GLint compileStatus = 0;
glGetShaderiv(shaderId, GL_COMPILE_STATUS, &compileStatus);//检查编译状态
if (compileStatus == GL_FALSE)//获取错误报告
{
GLint maxLength = 0;
glGetShaderiv(shaderId, GL_INFO_LOG_LENGTH, &maxLength);
std::vector errLog(maxLength);
glGetShaderInfoLog(shaderId, maxLength, &maxLength, &errLog[0]);
std::cout << "Error::Shader file [" << shaderFileVec[i].filePath << " ] compiled failed,"
<< &errLog[0] << std::endl;
bSuccess = false;
}
shaderObjectIdVec.push_back(shaderId);
}
//链接shader program
if (bSuccess)
{
this->programId = glCreateProgram();
for (size_t i = 0; i < shaderCount; ++i)
{
glAttachShader(this->programId, shaderObjectIdVec[i]);
}
glLinkProgram(this->programId);
GLint linkStatus;
glGetProgramiv(this->programId, GL_LINK_STATUS, &linkStatus);
if (linkStatus == GL_FALSE)
{
GLint maxLength = 0;
glGetProgramiv(this->programId, GL_INFO_LOG_LENGTH, &maxLength);
std::vector errLog(maxLength);
glGetProgramInfoLog(this->programId, maxLength, &maxLength, &errLog[0]);
std::cout << "Error::Shader link failed," << &errLog[0] << std::endl;
}
}
//连接完成后detach 并释放 shader object
for (size_t i = 0; i < shaderCount; ++i)
{
if (this->programId != 0)
{
glDetachShader(this->programId, shaderObjectIdVec[i]);
}
glDeleteShader(shaderObjectIdVec[i]);
}
}
/*
*读取着色器程序源码
*/
bool loadShaderSource(const char* filePath, std::string& source)
{
source.clear();
std::ifstream in_stream(filePath);
if (!in_stream)
{
return false;
}
source.assign(std::istreambuf_iterator(in_stream),
std::istreambuf_iterator()); //文件流迭代器构造字符串
return true;
}
};
#endif
4, 我还准备了深度相机的SDK和辅助头文件,来实时获取RGBD数据流;SDK自己去官网下载PicoZense_DCAM710_SDK
#ifndef _PSFRAMEHELPPER_
#define _PSFRAMEHELPPER_
#include <iostream>
#include <cstdlib> // system, exit
#include "PicoZense_api.h" //yes, 这就是深度相机的SDK头文件
/*
 * Small wrapper around the PicoZense SDK lifecycle: bring the camera up,
 * start the depth + depth-aligned-RGB streams, and tear everything down.
 */
class PsFrameHelpper
{
public:
	// Initializes the SDK, opens device `deviceIndex`, enables depth->RGB
	// mapping and starts the depth and mapped-RGB streams.
	// On SDK init/open failure this prints a message, waits for a key
	// (Windows "pause") and terminates the process.
	void PsFrameInit()
	{
		using namespace std;
		status = PsInitialize();
		if (status != PsReturnStatus::PsRetOK)
		{
			cout << "Initialize failed!" << endl;
			system("pause");
			exit(0);
		}
		status = PsOpenDevice(deviceIndex);
		if (status != PsReturnStatus::PsRetOK)
		{
			cout << "OpenDevice 0 failed!" << endl;
			system("pause");
			exit(0);
		}
		// Ask the SDK to produce an RGB image registered to the depth frame.
		PsSetMapperEnabledDepthToRGB(deviceIndex, true);
		status = PsStartFrame(deviceIndex, PsDepthFrame);
		status = PsStartFrame(deviceIndex, PsMappedRGBFrame);
	}
	// Stops every stream started in PsFrameInit and shuts the SDK down.
	void PsFrameClose()
	{
		status = PsStopFrame(deviceIndex, PsDepthFrame);
		// FIX: the mapped-RGB stream was started but never stopped.
		status = PsStopFrame(deviceIndex, PsMappedRGBFrame);
		status = PsCloseDevice(deviceIndex);
		status = PsShutdown();
	}
public:
	PsReturnStatus status;   // last SDK return code
	int32_t deviceIndex = 0; // first attached camera
};
#endif
5,你以为已经准备妥当了?其实我也想~ 我们还要两个小shader来指示如何显示图片~
面片着色器:triangle.frag
#version 330 core
in vec2 tex_coord;
layout (location = 0) out vec4 color;
uniform sampler2D tex;
// Sample the video texture; the camera delivers BGR-ordered bytes, so the
// sample is swizzled back to RGBA in a single expression.
void main(void)
{
	color = texture(tex, tex_coord).bgra;
}
顶点着色器:triangle.vert
#version 330 core
// Quad vertices arrive already in NDC; pass position and UV straight through.
layout (location = 0) in vec2 in_position;
layout (location = 1) in vec2 in_tex_coord;
out vec2 tex_coord;
void main(void)
{
// z = 0, w = 1: flat quad in front of the camera.
gl_Position = vec4(in_position, 0.0, 1.0);
tex_coord = in_tex_coord;
}
6,前菜上完了,到正餐了,对,精髓~
#include <GL/glew.h>
#include <GL/freeglut.h>
#include "PsFrameHelpper.h"
#include <Windows.h>
#include <iostream>
#include "shader.h"
#include <cstdio>
// Freeglut reports mouse-wheel scrolls as extra "buttons" 3 and 4.
#define GLUT_WHEEL_UP 3
#define GLUT_WHEEL_DOWN 4
// Plain 3D position; used as the point-cloud rotation/translation centre.
struct Vertex
{
GLfloat x;
GLfloat y;
GLfloat z;
} ;
SYSTEMTIME sys; // scratch for GetLocalTime-based frame timing
// settings//-------------------------------------------------------------------------
const int SCR_WIDTH = 1493, SCR_HEIGHT =960; // window sized for a 2x2 grid of sub-viewports
// Depth16 -> Gray8 compression (slope depends on the active depth range).
static void UShort2Gray(PsDepthPixel *DepthImg, PsGray8Pixel *GrayImg, int width, int height, PsDepthRange DepthRange, bool isDepth2Gray);
// Gray8 -> BGR pseudo-colour ramp for the depth view.
static void Gray2Color(PsGray8Pixel *GrayImg, int width, int height, PsBGR888Pixel *ColorImg);
void DrowPointCloud();
// Dumps the current cloud to disk; definition not in this listing.
void PointCloudWriter(bool isDepth);
void KeyBoards(unsigned char key, int x, int y);
void onMouseMove(int x, int y);
void myMouse(int button, int state, int x, int y);
void PrintUsage(){ std::cout << "Enter Numbers: 1 Near; 2 Mid; 3 Far; 5 ColorMap; 6 Show PointCloud; 8 PointCloudWrite" << std::endl; }
// GL objects shared by init()/display()/OnShutdown().
GLuint program;
GLuint vao;
GLuint quad_vbo;
GLuint tex;
// Point-cloud view state driven by the mouse callbacks.
float thetaX = 0.0, thetaY = 0.0, scaleFactor = 1.0;
static float dx = 0, dy = 0, oldy = -1, oldx = -1;
Vertex ptsCen = {0.0f, 0.0f, -2.5f}; // pivot for rotation/scaling of the cloud
// Camera / SDK state.
PsFrameHelpper FrameHelpper;
PsReturnStatus status;
int32_t deviceIndex = 0;
PsFrameMode rgbFrameMode;
PsFrameMode depthFrameMode;
PsDepthRange DepthRange;
PsFrame rgbFrame = { 0 };
PsFrame depthFrame = { 0 };
PsFrame depthrgbFrame = { 0 }; // RGB registered onto the depth image
PsCameraParameters cameraParam = { 0.0 }; // depth-sensor intrinsics (fx, fy, cx, cy)
// UI toggles, flipped in KeyBoards().
bool colormap = true;
bool showPointCloud = true;
bool fullscreen = false;
long delayT; // millisecond timestamp of the previous frame (for the fps estimate)
//initial//-------------------------------------------------------------------------
// One-time setup: camera streams, quad geometry, shader program, texture.
// Must run after a GL context exists (called from main after glewInit).
void init(void)
{
// Bring up the PicoZense SDK and start the streams (exits on failure).
FrameHelpper.PsFrameInit();
// Active depth range -> used as the rescale slope in UShort2Gray.
PsGetDepthRange(deviceIndex, &DepthRange);
// Depth-sensor intrinsics -> used to back-project pixels in DrowPointCloud.
PsGetCameraParameters(deviceIndex, PsDepthSensor, &cameraParam);
// Frame modes describe resolution/layout of the incoming buffers.
PsGetFrameMode(deviceIndex, PsRGBFrame, &rgbFrameMode);
PsGetFrameMode(deviceIndex, PsDepthFrame, &depthFrameMode);
// GL state: black clear colour.
glClearColor(0.0, 0.0, 0.0, 0.0);
glEnable(GL_DEBUG_OUTPUT);
// One quad used as the "canvas" for all video panes:
// first 8 floats are NDC positions (triangle strip), last 8 are the
// matching texture coordinates with V flipped so the top-down camera
// image appears upright.
static const GLfloat quad_data[] ={
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &quad_vbo);
glBindBuffer(GL_ARRAY_BUFFER, quad_vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(quad_data), quad_data, GL_STATIC_DRAW);
// Attribute 0: position (first half of the buffer).
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, (GLvoid*)0);
glEnableVertexAttribArray(0);
// Attribute 1: UVs, starting after the 8 position floats.
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)(8 * sizeof(float)));
glEnableVertexAttribArray(1);
glBindVertexArray(0);
// Compile/link the quad shader; static so it outlives init() and its
// destructor (glDeleteProgram) runs at process exit.
static Shader shader("triangle.vert", "triangle.frag");
program = shader.programId;
// One reusable texture object; each pane re-uploads its image per frame.
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindTexture(GL_TEXTURE_2D, 0);
}
//Display//-------------------------------------------------------------------------
void display(void)
{
//Get Frame
GetLocalTime(&sys);
long dwStart = sys.wMilliseconds;
PsReadNextFrame(deviceIndex);
PsGetFrame(deviceIndex, PsRGBFrame, &rgbFrame);
PsGetFrame(deviceIndex, PsDepthFrame, &depthFrame);
PsGetFrame(deviceIndex, PsMappedRGBFrame, &depthrgbFrame);
//opengl display
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
glUseProgram(program);
glBindVertexArray(vao);
glBindTexture(GL_TEXTURE_2D, tex);
glUniform1i(glGetUniformLocation(program, "tex"), 0);
//show RGB image
if (rgbFrame.pFrameData != NULL && !fullscreen){
glViewport(640, 480, 853, 480);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB,
rgbFrameMode.resolutionWidth, rgbFrameMode.resolutionHeight,
0, GL_RGB, GL_UNSIGNED_BYTE, rgbFrame.pFrameData);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
//show Depth image
if (depthFrame.pFrameData != NULL&& !fullscreen){
glViewport(0,480, 640, 480);
//Depth Frame processing
PsDepthPixel *DepthFrameData = (PsDepthPixel *)depthFrame.pFrameData;
PsGray8Pixel *depthTex = new PsGray8Pixel[640 * 480];
UShort2Gray(DepthFrameData, depthTex, 640, 480, DepthRange, true);
if (!colormap){
//Depth_Gray image
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, 640, 480, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, depthTex);
}
else{
//Depth_color image
PsBGR888Pixel *ColorImg = new PsBGR888Pixel[640 * 480];
Gray2Color(depthTex, 640, 480, ColorImg);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 640, 480, 0, GL_RGB, GL_UNSIGNED_BYTE, ColorImg);
delete ColorImg;
}
delete depthTex;
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
if (depthrgbFrame.pFrameData != NULL&& !fullscreen){
glViewport(0,0, 640, 480);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB,640, 480,0, GL_RGB, GL_UNSIGNED_BYTE, depthrgbFrame.pFrameData);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
glBindVertexArray(0);
glUseProgram(0);
if (showPointCloud){
if (fullscreen)
glViewport(0, 0, 1493, 960);
else
glViewport(640, 0, 853, 480);
glEnable(GL_DEPTH_TEST);
if (thetaY<0){
thetaY = thetaY + 360;
}
if (thetaY>360){
thetaY = thetaY - 360;
}
if (thetaX<0){
thetaX = thetaX + 360;
}
if (thetaX>360){
thetaX = thetaX - 360;
}
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45.0, (GLfloat)853 / (GLfloat)480, 0.1, 1000.0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0);
glRotatef(thetaX, 1, 0, 0);
glRotatef(thetaY, 0, 1, 0);
glScalef(scaleFactor, scaleFactor, scaleFactor);
glTranslatef(-ptsCen.x, -ptsCen.y, -ptsCen.z);
DrowPointCloud();
}
glutSwapBuffers();
GetLocalTime(&sys);
long dwEnd = sys.wMilliseconds;
long TimeSpend = dwEnd - dwStart;
long timedelay = dwEnd - delayT;
delayT = dwEnd;
if (TimeSpend < 0){
TimeSpend += 1000;
}
if (timedelay < 0){
timedelay += 1000;
}
//printf("frame TimeSpend: %d, fps: %d Hz\n", TimeSpend, 1000 / timedelay);
}
//ShutDown//----------------------------------------------------------------------------
// freeglut close callback: release GL resources, then stop the camera.
void OnShutdown()
{
	// Destroy texture, VBO and VAO created in init().
	glDeleteTextures(1, &tex);
	glDeleteBuffers(1, &quad_vbo);
	glDeleteVertexArrays(1, &vao);
	FrameHelpper.PsFrameClose();
	// FIX: "successfull" typo + missing newline in the log message.
	printf("Shutdown successful\n");
}
//Timer//----------------------------------------------------------------------------
// Fires roughly every 33 ms (~30 fps): re-arm the one-shot GLUT timer and
// queue a redisplay so display() runs once per tick.
void TimerFunc(int value)
{
	glutTimerFunc(33, TimerFunc, 1);
	glutPostRedisplay();
}
//Main//----------------------------------------------------------------------------
int main(int argc, char *argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA);
glutInitWindowPosition(200, 10);
glutInitWindowSize(SCR_WIDTH, SCR_HEIGHT);
glutCreateWindow("GL_MultiFrame_Viewer");
PrintUsage();
if (glewInit() != GLEW_OK){
printf("Failed to initialize GLEW ... exiting");
exit(EXIT_FAILURE);
}
init();
glutKeyboardFunc(&KeyBoards);
glutMouseFunc(myMouse);
glutMotionFunc(onMouseMove);
glutDisplayFunc(&display);
glutTimerFunc(33, TimerFunc, 1);
glutCloseFunc(OnShutdown);
glutMainLoop();
return 0;
}
//Drow PointCloud//----------------------------------------------------------------------------
void DrowPointCloud(){
PsVector3f mWorldVector = { 0.0f };
PsDepthPixel* pdepth = (PsDepthPixel*)depthFrame.pFrameData;
PsBGR888Pixel * pcolordepth = (PsBGR888Pixel *)depthrgbFrame.pFrameData;
glPointSize(1.0f);
float fR = 1.0f, fG =1.0f, fB = 1.0f;
glBegin(GL_POINTS);
for (int h = 0; h < 480; h++){
for (int w = 0; w < 640; w++, pcolordepth++){
if (pdepth[h * 640 + w] == 0)
continue; //discard zero-depth points
else{
//caculate Vertices Position by depthFrame and cameraParam
mWorldVector.x = (w - cameraParam.cx) / cameraParam.fx * pdepth[h * 640 + w];
mWorldVector.y = (h - cameraParam.cy) / cameraParam.fy * pdepth[h * 640 + w];
mWorldVector.z = pdepth[h * 640 + w];
fR = (float)pcolordepth->r / 255;
fG = (float)pcolordepth->g / 255;
fB = (float)pcolordepth->b / 255;
//drow point
glColor3f(fR, fG, fB);
glVertex3f(mWorldVector.x / 100, -1*mWorldVector.y / 100, -1 * mWorldVector.z / 100);
}
}
}
glEnd();
}
//KeyBoards//----------------------------------------------------------------------------
void KeyBoards(unsigned char key, int x, int y)
{
switch (key){
case '1':
PsSetDepthRange(deviceIndex, PsNearRange);
PsGetDepthRange(deviceIndex, &DepthRange);
std::cout << "Set DepthRange: Near" << std::endl;
PrintUsage();
break;
case '2':
PsSetDepthRange(deviceIndex, PsMidRange);
PsGetDepthRange(deviceIndex, &DepthRange);
std::cout << "Set DepthRange: Mid" << std::endl;
PrintUsage();
break;
case '3':
PsSetDepthRange(deviceIndex, PsFarRange);
PsGetDepthRange(deviceIndex, &DepthRange);
std::cout << "Set DepthRange: Far" << std::endl;
PrintUsage();
break;
case '5':
colormap = !colormap;
break;
case'6':
showPointCloud = !showPointCloud;
std::cout << "show pointcloud : " << showPointCloud<r = 0;
tempRGB->g = 0;
tempRGB->b = 0;
}
else if (grayValue <= 51){
tempRGB->r = 255;
tempRGB->g = grayValue * 5;
tempRGB->b = 0;
}
else if (grayValue <= 102){
grayValue -= 51;
tempRGB->r = 255 - grayValue * 5;
tempRGB->g = 255;
tempRGB->b = 0;
}
else if (grayValue <= 153){
grayValue -= 102;
tempRGB->r = 0;
tempRGB->g = 255;
tempRGB->b = grayValue * 5;
}
else if (grayValue <= 204){
grayValue -= 153;
tempRGB->r = 0;
tempRGB->g = 255 - static_cast(grayValue *128.0 / 51 + 0.5);
tempRGB->b = 255;
}
else if (grayValue <= 255){
grayValue -= 204;
tempRGB->r = 255;
tempRGB->g = 127 - static_cast(grayValue *127.0 / 51 + 0.5);
tempRGB->b = 0;
}
}
}
//Unsigned short to unsigned char//----------------------------------------------------------------------------
// Compresses a 16-bit depth (or grey/IR) image into 8 bits for display.
// `slope` is the largest expected input value for the active depth range;
// inputs at or beyond it saturate to 255.
// NOTE(review): the published listing is truncated inside the final loop
// (HTML escaping ate everything after "if (int(DepthImg[i])"); the linear
// rescale below is a reconstruction — verify against the original project.
static void UShort2Gray(PsDepthPixel *DepthImg, PsGray8Pixel *GrayImg, int width, int height, PsDepthRange DepthRange, bool isDepth2Gray)
{
	uint32_t slope;
	//Depth16 to Depth8
	if (isDepth2Gray){
		// Maximum measurable depth for each range, in raw sensor units.
		if (DepthRange == PsNearRange)
			slope = 1450;
		else if (DepthRange == PsMidRange)
			slope = 3000;
		else
			slope = 4400;
	}
	//Gray16 to Gray8
	else
		slope = 3840;
	for (int i = 0; i < width * height; i++)
	{
		int grayValue;
		if (int(DepthImg[i]) < int(slope))
			grayValue = int(DepthImg[i]) * 255 / int(slope);
		else
			grayValue = 255; // clamp out-of-range readings
		GrayImg[i] = static_cast<PsGray8Pixel>(grayValue);
	}
}
7,如果不多说两句,这代码仍然没有灵魂~ 对我要开大了
第一,为啥要定义一个矩形顶点数组:static const GLfloat quad_data[]
不瞒你说,opengl需要一个“画布”也就是一个矩形面片来播放图像,我们不停的给这个面片贴上(通过纹理贴图)相机更新上来的图像,这样就是视频啦~
第二,为什么需要计时器TimerFunc(int value),没有他,你就不知道什么时候更新这个循环,他让我们的循环更加方便;
第三, 为什么把uint_16的深度图转换到uint_8的类型(UShort2Gray)?因为我们不能显示16位的图像,所以将他先映射到255空间,然后给他定义一个伪彩色转换(Gray2Color),这样我们就可以看到漂亮的深度图啦~
第四,点云转换,和彩色点云的绘制,这个有点技术含量,但没有门槛的,一下就会,老生常谈了,那个转换的核心代码到处都是,每得到一个(x,y,z)记得从map好的彩色图里对应取出他的对应像素点,把这个点当成颜色画给那个需要他的坐标点,嗯,picozense 的SDK已经帮你准备好了对准的彩色图,只要你先设定一下,具体看那个helpper;
第五, 我们会显示三路图像视频,和一路点云视频。图像有深度图,彩色图,对齐图,同时点云还要可以被鼠标控制旋转和缩放,这需要一点技巧,我们把他们化成四个小窗口,分别绘制他们,三张图像的播放思路是一样的,点云另起一条思路。这里我们用到一个有用的gl函数:如glViewport(0,480, 640, 480);他指定了当前这一路画面绘制在窗口的哪个区域(视口的位置和大小),嗯,很关键~
第六, 其实是上面没说完的,点云的鼠标控制,当然还有键盘响应事件,我们定义鼠标回调幻术,对幻术,哈哈哈~这需要opengl的一些组合控制才能配合好,哎呀,我实在讲不下去啦,这地方最乱~你自己琢磨吧~你自己琢磨吧~~嗯,推卸真香~
最后,来,给你们看一下这加了特效的duang~
嗯,很简洁的布局,真机智~
欢迎转载,不过别忘标注出处哦~
下载地址:Opengl_Display_RGBD.zip