到这里,我相信绝大多数大一的相关专业学生都可以完成这样一台扫描仪了。
让我们全局看一下这个小项目。
机械方面
可以参考三维扫描仪[8]——如何设计一台云台式扫描仪(机械结构)
我们需要一个支架,支起Kinect和云台,同时我们也需要一套齿轮组,获得精确的角度变化。
开发环境方面
可以参考:
三维扫描仪[6]——常用软件及开发环境
三维扫描仪[7]——认识Processing和Arduino开发环境
其中 Meshlab 可用于查看和处理导出的 PLY 点云文件。
项目代码方面
可以参考:
三维扫描仪[9]——如何设计一台云台式扫描仪(初步软件设计)
三维扫描仪[10]——如何设计一台云台式扫描仪(代码详解)
这里也附上全套代码
Arduino
步进电机代码
// Stepper-driver pin assignments.
int xdir = 13;  // direction pin (held LOW in setup — fixed rotation direction)
int Step = 12;  // step-pulse pin, toggled in loop()
int xen = 9;    // driver enable pin; driven LOW in setup — presumably an active-low enable, verify against the driver board
// One-time init: configure the three driver pins as outputs, then
// enable the driver and fix the rotation direction.
void setup() {
  int driverPins[] = { xen, xdir, Step };
  for (int i = 0; i < 3; i++) {
    pinMode(driverPins[i], OUTPUT);
  }
  digitalWrite(xen, LOW);   // LOW on the enable pin
  digitalWrite(xdir, LOW);  // LOW on the direction pin
}
// Emit a continuous step-pulse train: 10 ms low + 10 ms high per cycle,
// i.e. one step every 20 ms.
void loop() {
  const int levels[] = { LOW, HIGH };
  for (int i = 0; i < 2; i++) {
    digitalWrite(Step, levels[i]);
    delay(10);
  }
}
角度传感器代码
int potPin = A0;  // potentiometer wiper that tracks the turntable angle
int Pos = 0;      // latest raw ADC reading (0-1023)
// One-time init: analog input for the pot, and the 9600-baud serial
// link that feeds the Processing sketch.
void setup() {
  pinMode(potPin, INPUT);
  Serial.begin(9600);  // must match the rate used by the Processing side
}
// Sample the pot and report it over serial ~4 times per second.
// Fix: the original delayed 250 ms between reading and printing, so the
// transmitted value was always a quarter second stale; the reading is
// now sent immediately and the delay only paces the next sample.
void loop() {
  Pos = analogRead(potPin);
  Serial.println(Pos);
  delay(250);
}
Processing
import processing.serial.*;
import processing.opengl.*;
import SimpleOpenNI.*;
import kinectOrbit.*;
// Orbit (mouse-controlled 3D view) and OpenNI (Kinect) handles.
KinectOrbit myOrbit;
SimpleOpenNI kinect;
// Serial link to the Arduino angle sensor.
Serial myPort;
boolean serial = true;
// Turntable angle as received over serial: raw string, raw 0-1023 int,
// and the mapped angle in radians.
String turnTableAngle = "0";
int turnAngle = 0;
float Angle = 0;
// Point clouds and their per-point colors.
// Fix: these were raw ArrayLists, but the rest of the sketch accesses
// .x/.y/.z directly on get(i), which requires ArrayList<PVector>.
ArrayList<PVector> scanPoints = new ArrayList<PVector>();    // current-shot point cloud
ArrayList<PVector> scanColors = new ArrayList<PVector>();    // current-shot colors
ArrayList<PVector> objectPoints = new ArrayList<PVector>();  // accumulated object point cloud
ArrayList<PVector> objectColors = new ArrayList<PVector>();  // accumulated object colors
// Scan volume around the turntable axis (units: mm, Kinect real-world space).
float baseHeight = -180;   // y of the turntable surface
float modelWidth = 1000;   // width/depth of the scan volume
float modelHeight = 1000;  // height of the scan volume
PVector axis = new PVector(0, baseHeight, 1200); // turntable rotation axis
int scanLines = 300;  // width (px) of the depth-image strip that is scanned
int scanRes = 1;      // sampling stride in pixels (1 = highest resolution)
boolean scanning = false;
boolean arrived = false;
float[] shotNumber = new float[30]; // caps the number of shots per object
int currentShot = 0;
int dataNum = 1;      // sequence number used for exported .ply file names
// One-time sketch initialization: window, orbit camera, Kinect streams,
// and the serial link to the Arduino angle sensor.
public void setup()
{
  size(800, 600, OPENGL);
  // Init the mouse-driven orbit view (settings profile named "kinect").
  myOrbit = new KinectOrbit(this, 0, "kinect");
  myOrbit.drawCS(true);
  myOrbit.drawGizmo(true);
  myOrbit.setCSScale(200);
  myOrbit.drawGround(true);
  // Init SimpleOpenNI: depth + RGB streams, with the depth map aligned
  // to the RGB image so one index addresses both.
  kinect = new SimpleOpenNI(this);
  kinect.setMirror(false);
  kinect.enableDepth();
  kinect.enableRGB();
  kinect.alternativeViewPointDepthToImage();
  // Serial link setup.
  // NOTE(review): assumes the Arduino is the first listed serial port —
  // verify Serial.list()[0] on the target machine.
  if(serial) {
    String portName = Serial.list()[0];
    myPort = new Serial(this, portName, 9600);
    // Buffer until newline so serialEvent() always sees whole lines.
    myPort.bufferUntil('\n');
  }
}
// Per-frame loop: refresh the Kinect, draw the live cloud, rebuild the
// current scan strip for the latest turntable angle, then draw the
// accumulated object and the scene helpers.
public void draw()
{
  kinect.update();
  background(0);
  myOrbit.pushOrbit(this); // start orbit camera transform
  drawPointCloud(1);
  // Refill scanPoints/scanColors from the current frame and angle.
  updateObject(scanLines, scanRes);
  drawObjects();
  drawBoundingBox();
  kinect.drawCamFrustum();
  myOrbit.popOrbit(this); // end orbit camera transform
}
// Draws the full live Kinect depth map as a grey point cloud.
// steps: sampling stride in pixels (1 = every pixel).
// Fixes: the original called stroke(255) once (dead — immediately
// overridden) and stroke(150) once per point inside the inner loop;
// a single stroke(150) is hoisted out. depthMapRealWorld() and
// depthWidth() are also hoisted out of the loops.
void drawPointCloud(int steps)
{
  PVector[] depthPoints = kinect.depthMapRealWorld();
  int depthWidth = kinect.depthWidth();
  stroke(150);
  for (int y = 0; y < kinect.depthHeight(); y += steps) {
    for (int x = 0; x < depthWidth; x += steps) {
      PVector p = depthPoints[x + y * depthWidth];
      point(p.x, p.y, p.z);
    }
  }
}
// Draws the accumulated object cloud and the current scan strip, both
// shifted by axis.z so they sit around the turntable axis.
// Fix: the original loops started at i = 1, silently dropping the first
// point of each list; they now start at 0. Elements are cast explicitly
// so the method also compiles against raw ArrayList declarations.
void drawObjects()
{
  pushStyle();
  strokeWeight(1);
  for (int i = 0; i < objectPoints.size(); i++) {
    PVector p = (PVector) objectPoints.get(i);
    PVector c = (PVector) objectColors.get(i);
    stroke(c.x, c.y, c.z);
    point(p.x, p.y, p.z + axis.z);
  }
  for (int i = 0; i < scanPoints.size(); i++) {
    PVector p = (PVector) scanPoints.get(i);
    PVector c = (PVector) scanColors.get(i);
    stroke(c.x, c.y, c.z);
    point(p.x, p.y, p.z + axis.z);
  }
  popStyle();
}
// Draws the turntable axis marker (red line) and the wireframe box of
// the scan volume.
// Fix: the box's y-translation was axis.x + baseHeight + modelHeight/2,
// which is only correct while axis.x happens to be 0; the spurious
// axis.x term is removed so the box stays on the base if axis moves.
void drawBoundingBox()
{
  stroke(255, 0, 0);
  line(axis.x, axis.y, axis.z, axis.x, axis.y + 100, axis.z);
  noFill();
  pushMatrix();
  translate(axis.x, baseHeight + (modelHeight / 2), axis.z);
  box(modelWidth, modelHeight, modelWidth);
  popMatrix();
}
// Appends copies of the current scan strip (scanPoints/scanColors) to
// the accumulated object cloud, then advances the shot counter.
// Fix: the original looked up each point with scanPoints.indexOf(v)
// inside the loop — accidental O(n^2); a single indexed loop keeps
// points and colors paired in O(n). Casts keep it valid even against
// raw ArrayList declarations.
void scan()
{
  for (int i = 0; i < scanPoints.size(); i++) {
    objectPoints.add(((PVector) scanPoints.get(i)).get());
    objectColors.add(((PVector) scanColors.get(i)).get());
  }
  if (currentShot < shotNumber.length - 1) {
    currentShot++;
    println("scan angle = " + Angle);
  }
  else {
    scanning = false; // all shots taken
  }
  arrived = false;
}
// Rebuilds scanPoints/scanColors from the live depth frame: keeps only
// points inside the scan volume around the turntable axis, shifts them
// onto the axis, and rotates them by the current turntable angle so
// successive shots line up.
// scanWidth: width (px) of the depth-image strip to sample.
// step:      sampling stride in pixels.
// Fixes: (1) serialEvent() is only polled when the serial link is
// enabled — previously myPort (null when serial == false) was
// dereferenced unconditionally; (2) depthMapRealWorld()/rgbImage() are
// hoisted out of the loops; (3) points are copied instead of mutating
// the Kinect's own depth buffer in place.
void updateObject(int scanWidth, int step)
{
  scanPoints.clear();
  scanColors.clear();
  if (serial) {
    serialEvent(myPort); // poll the latest turntable reading
  }
  // Map the raw 10-bit pot value to radians.
  Angle = radians(map(turnAngle, 0, 1023, 0, 360));
  // Draw a marker line showing the current turntable angle.
  pushMatrix();
  translate(axis.x, axis.y, axis.z);
  rotateY(Angle);
  line(0, 0, 100, 0);
  popMatrix();
  PVector[] depthPoints = kinect.depthMapRealWorld();
  int[] rgbPixels = kinect.rgbImage().pixels;
  int depthWidth = kinect.depthWidth();
  // Sample only a vertical strip centered in the depth image.
  int xMin = (int) (depthWidth / 2 - scanWidth / 2);
  int xMax = (int) (depthWidth / 2 + scanWidth / 2);
  for (int y = 0; y < kinect.depthHeight(); y += step) {
    for (int x = xMin; x < xMax; x += step) {
      int index = x + (y * depthWidth);
      PVector p = depthPoints[index];
      int pointCol = rgbPixels[index];
      // Keep only points inside the scan volume box.
      if (p.y < (modelHeight + baseHeight) && p.y > baseHeight
          && abs(p.x - axis.x) < modelWidth / 2
          && p.z < axis.z + modelWidth / 2 && p.z > axis.z - modelWidth / 2) {
        // Shift onto the turntable axis, then counter-rotate by the
        // table angle (new PVector: the Kinect buffer is left intact).
        PVector shifted = new PVector(p.x - axis.x, p.y, p.z - axis.z);
        scanPoints.add(vecRotY(shifted, Angle));
        scanColors.add(new PVector(red(pointCol), green(pointCol), blue(pointCol)));
      }
    }
  }
}
// Rotates vecIn around the Y axis by phi radians and returns a new
// vector (the input is not modified).
// Fix: cos(phi) and sin(phi) were each evaluated twice; they are now
// computed once and reused.
PVector vecRotY(PVector vecIn, float phi)
{
  float c = cos(phi);
  float s = sin(phi);
  PVector rotatedVec = new PVector();
  rotatedVec.x = vecIn.x * c - vecIn.z * s;
  rotatedVec.z = vecIn.x * s + vecIn.z * c;
  rotatedVec.y = vecIn.y;
  return rotatedVec;
}
// Keyboard controls:
//   'c' — clear the accumulated object cloud and restart the shot count
//   'e' — export the object cloud to data/myPoints<N>.ply
//   's' — merge the current scan strip into the object cloud
// Fix: the local `char key` in case 'e' shadowed the sketch's own `key`
// field that this switch reads — renamed to fileTag; the case body is
// braced so the declarations are scoped to that case only.
// NOTE(review): fileTag is only the first character of dataNum, so from
// the 10th export onward file names repeat ("10" -> '1'); consider
// passing the whole number through to exportPly.
public void keyPressed()
{
  switch(key) {
  case 'c':
    objectPoints.clear();
    objectColors.clear();
    currentShot = 0;
    break;
  case 'e': {
    String stringNum = String.valueOf(dataNum);
    char fileTag = stringNum.charAt(0);
    exportPly(fileTag);
    dataNum++;
    println("save success!");
    break;
  }
  case 's':
    scan();
    break;
  }
}
// Called by Processing whenever a full line arrives from the Arduino
// (and also polled manually from updateObject). Parses the raw 0-1023
// pot reading into turnAngle.
// Fix: a garbled or partial line used to throw NumberFormatException
// and kill the sketch; malformed lines are now skipped, keeping the
// last good angle.
public void serialEvent(Serial myPort)
{
  String inString = myPort.readStringUntil('\n');
  if (inString != null) {
    // strip whitespace/CR before parsing
    turnTableAngle = trim(inString);
    try {
      turnAngle = Integer.parseInt(turnTableAngle);
    }
    catch (NumberFormatException e) {
      // ignore malformed line; previous turnAngle stays in effect
    }
  }
}
// Writes the accumulated object cloud to data/myPoints<key>.ply as
// ASCII PLY: positions scaled from mm to m, colors as uchar RGB.
// key: single character appended to the file name.
// Fix: the original header declared size()-1 vertices and the loop
// stopped at size()-2, silently dropping the last scanned point; all
// points are now written and the header count matches.
void exportPly(char key)
{
  String viewPointFileName = "myPoints" + key + ".ply";
  PrintWriter output = createWriter(dataPath(viewPointFileName));
  int vertexCount = objectPoints.size();
  // PLY header
  output.println("ply");
  output.println("format ascii 1.0");
  output.println("comment This is your Processing ply file");
  output.println("element vertex " + vertexCount);
  output.println("property float x");
  output.println("property float y");
  output.println("property float z");
  output.println("property uchar red");
  output.println("property uchar green");
  output.println("property uchar blue");
  output.println("end_header");
  // One "x y z r g b" line per point.
  for (int i = 0; i < vertexCount; i++) {
    PVector p = (PVector) objectPoints.get(i);
    PVector c = (PVector) objectColors.get(i);
    output.println(
      (p.x / 1000) + " " + (p.y / 1000) + " " + (p.z / 1000) + " "
      + (int) c.x + " " + (int) c.y + " " + (int) c.z
    );
  }
  output.flush();
  output.close();
}
三维重建原理方面
可以参考:
三维扫描仪[2]——大恒、微软、还是淘宝一下双目摄像头?
三维扫描仪[3]——标定·理论
本项目并不涉及、但是接下来会用到的标定方面
可以参考:
三维扫描仪[4]——标定·Matlab单目标定
三维扫描仪[5]——标定·Matlab双目标定
展望
本来没有这个部分的
然而我昨天(2017.4.21)散步的时候突然想到:
用两个Kinect
KinectA保持不动 KinectB在A的视野下运动 由KinectB扫描
我们需要确定A的空间位置 获得B实时的空间位置和角度
最终!根据多个空间坐标系的转换 实现手持Kinect扫描并叠加
没有实验过,也没考虑过优势。
只是作为一个二本大三的软件工程学生,感觉扫描仪再贵点,再有庞杂的机械结构,挺不好的。
希望廉价的扫描仪能服务更多的普通人吧。
希望我将来去漫展的时候,真的能把你们变成我的手办。