Adding your own dataset to ModelNet40
ModelNet40: http://modelnet.cs.princeton.edu/#
ModelNet40 contains CAD 3D models of 40 categories and is a standard benchmark for evaluating point-cloud deep learning models on semantic segmentation, instance segmentation and classification.
What the code does: normalize your own point-cloud objects and merge them into ModelNet40.
Main steps:
Centering: move the center of the point cloud to the coordinate origin;
Scaling: limit the absolute value of every point coordinate to within 1;
Sampling: sample the point cloud to a fixed number of points;
Saving the output in the required file format (a toy sketch of the first three steps follows below).
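Before the full script, here is a minimal self-contained sketch of the first three steps on a random toy cloud; the point count and sample size below are made up for illustration.
import numpy as np

# toy point cloud: 5000 points, columns x y z
pts = np.random.rand(5000, 3) * 10.0
# 1) centering: move the bounding-box center to the origin
centre = (pts.min(axis=0) + pts.max(axis=0)) / 2
pts = pts - centre
# 2) scaling: divide by the largest absolute coordinate so every value lies in [-1, 1]
pts = pts / np.max(np.abs(pts))
# 3) sampling: keep a fixed number of points (sample with replacement if there are too few)
num_sample = 4096
idx = np.random.choice(pts.shape[0], num_sample, replace=pts.shape[0] < num_sample)
pts = pts[idx]
assert pts.shape == (num_sample, 3) and np.all(np.abs(pts) <= 1.0)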
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 25 21:53:03 2019
@author: sgl
"""
import os
import sys
import numpy as np
import h5py
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
def getDataFiles(list_filename):
    return [line.rstrip() for line in open(list_filename)]

def loadDataFile(path):
    data = np.loadtxt(path)
    num = data.shape[0]
    point_xyz = data[:, 0:3]
    point_normal = data[:, 3:6]
    point_rgb = data[:, 6:9]
    # The label here is just an example and should be replaced with the real one.
    # ModelNet40 uses labels 0-39, so new classes can be 40, 41, ...
    label = np.ones((num, 1), dtype=int) + 39
    return point_xyz, label
def change_scale(data):
    # centre
    xyz_min = np.min(data[:, 0:3], axis=0)
    xyz_max = np.max(data[:, 0:3], axis=0)
    xyz_move = xyz_min + (xyz_max - xyz_min) / 2
    data[:, 0:3] = data[:, 0:3] - xyz_move
    # scale
    scale = np.max(data[:, 0:3])
    data[:, 0:3] = data[:, 0:3] / scale
    return data
def sample_data(data, num_sample):
    """ data is N x C;
    we want to keep num_sample x C of them.
    If N > num_sample, randomly keep num_sample of them.
    If N < num_sample, randomly duplicate samples.
    """
    N = data.shape[0]
    if N == num_sample:
        return data, range(N)
    elif N > num_sample:
        sample = np.random.choice(N, num_sample)
        return data[sample, ...], sample
    else:
        sample = np.random.choice(N, num_sample - N)
        dup_data = data[sample, ...]
        return np.concatenate([data, dup_data], 0), list(range(N)) + list(sample)
if __name__ == "__main__":
    DATA_FILES = getDataFiles(os.path.join(BASE_DIR, 'file_path.txt'))
    num_sample = 4096
    DATA_ALL = []
    for fn in range(len(DATA_FILES)):
        current_data, current_label = loadDataFile(DATA_FILES[fn])
        change_data = change_scale(current_data)
        data_sample, index = sample_data(change_data, num_sample)
        data_label = np.hstack((data_sample, current_label[index]))
        DATA_ALL.append(data_label)
    output = np.vstack(DATA_ALL)
    output = output.reshape(-1, num_sample, 4)
    # train/test split and save; assumes 9 input clouds: the first 7 go to training, the last 2 to testing
    if not os.path.exists('plant_train.h5'):
        with h5py.File('plant_train.h5', 'w') as f:
            f['data'] = output[0:7, :, 0:3]
            f['labels'] = output[0:7, :, 3]
    if not os.path.exists('plant_test.h5'):
        with h5py.File('plant_test.h5', 'w') as f:
            f['data'] = output[7:9, :, 0:3]
            f['labels'] = output[7:9, :, 3]
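A quick sanity check, not part of the original script: after running it, the generated plant_train.h5 can be read back with h5py to confirm the array shapes (the dataset names below simply mirror the ones written above).
import h5py

# read the generated file back and confirm shapes (7 training clouds of 4096 points each)
with h5py.File('plant_train.h5', 'r') as f:
    data = f['data'][:]      # expected shape: (7, 4096, 3)
    labels = f['labels'][:]  # expected shape: (7, 4096)
    print(data.shape, labels.shape, labels.min(), labels.max())
The next script applies the same centering, scaling and sampling to a single TXT point cloud and writes the result out both as a PCD file and as a TXT file.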
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 25 16:42:25 2019
@author: sgl
"""
import os
import sys
import numpy as np
import h5py
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
def sample_data(data, num_sample):
    """ data is N x C;
    we want to keep num_sample x C of them.
    If N > num_sample, randomly keep num_sample of them.
    If N < num_sample, randomly duplicate samples.
    """
    N = data.shape[0]
    if N == num_sample:
        # return data, range(N)
        return data
    elif N > num_sample:
        sample = np.random.choice(N, num_sample)
        # return data[sample, ...], sample
        return data[sample, ...]
    else:
        sample = np.random.choice(N, num_sample - N)
        dup_data = data[sample, ...]
        # return np.concatenate([data, dup_data], 0), list(range(N))+list(sample)
        return np.concatenate([data, dup_data], 0)
def creat_pcd_rgba(data, path):
    # write a pcd file; the 4th column is assumed to already hold a packed rgba value
    # path = os.path.join(BASE_DIR, 'out_put_'+str(1)+ '.pcd')
    if os.path.exists(path):
        os.remove(path)
    Output_Data = open(path, 'a')
    # headers
    Output_Data.write('# .PCD v0.7 - Point Cloud Data file format\nVERSION 0.7\nFIELDS x y z rgba\nSIZE 4 4 4 4\nTYPE F F F U\nCOUNT 1 1 1 1')
    string = '\nWIDTH ' + str(data.shape[0])
    Output_Data.write(string)
    Output_Data.write('\nHEIGHT 1\nVIEWPOINT 0 0 0 1 0 0 0')
    string = '\nPOINTS ' + str(data.shape[0])
    Output_Data.write(string)
    Output_Data.write('\nDATA ascii')
    # points: x y z rgba
    for j in range(data.shape[0]):
        string = ('\n' + str(data[j, 0]) + ' ' + str(data[j, 1]) + ' ' + str(data[j, 2]) + ' ' + str(int(data[j, 3])))
        Output_Data.write(string)
    Output_Data.close()
def creat_pcd_rgb(data, path):
    if os.path.exists(path):
        os.remove(path)
    Output_Data = open(path, 'a')
    # headers
    Output_Data.write('# .PCD v0.7 - Point Cloud Data file format\nVERSION 0.7\nFIELDS x y z rgba\nSIZE 4 4 4 4\nTYPE F F F U\nCOUNT 1 1 1 1')
    string = '\nWIDTH ' + str(data.shape[0])
    Output_Data.write(string)
    Output_Data.write('\nHEIGHT 1\nVIEWPOINT 0 0 0 1 0 0 0')
    string = '\nPOINTS ' + str(data.shape[0])
    Output_Data.write(string)
    Output_Data.write('\nDATA ascii')
    # pack RGB
    for j in range(data.shape[0]):
        R = data[j, 3]
        G = data[j, 4]
        B = data[j, 5]
        value = (int(R) << 16 | int(G) << 8 | int(B))
        string = ('\n' + str(data[j, 0]) + ' ' + str(data[j, 1]) + ' ' + str(data[j, 2]) + ' ' + str(value))
        Output_Data.write(string)
    Output_Data.close()

def creat_txt(data, path):
    np.savetxt(path, data)
data = np.loadtxt("/media/sgl/数据(混合硬盘)/pcl_ubuntu/pcl_test/plant.txt")
#centre
xyz_min = np.min(data[:,0:3],axis=0)
xyz_max = np.max(data[:,0:3],axis=0)
xyz_move = xyz_min+(xyz_max-xyz_min)/2
data[:,0:3] = data[:,0:3]-xyz_move
#scale
scale = np.max(data[:,0:3])
data[:,0:3] = data[:,0:3]/scale
#sample
data = sample_data(data,4096)
creat_pcd_rgba(data,'/media/sgl/数据(混合硬盘)/PointNet/basisnet/out_put_1.pcd')
creat_txt(data,'/media/sgl/数据(混合硬盘)/PointNet/basisnet/out_put_1.txt')
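The rgba value written above packs the three 8-bit colour channels into a single integer with R << 16 | G << 8 | B. A small standalone example of packing and unpacking, independent of the script above:
# pack R, G, B (each 0-255) into one integer, then unpack it again
R, G, B = 255, 128, 0
value = (R << 16) | (G << 8) | B      # 16744448
r = (value >> 16) & 0xFF              # 255
g = (value >> 8) & 0xFF               # 128
b = value & 0xFF                      # 0
print(value, (r, g, b))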
Generating a PCD file from TXT (Python implementation)
What the code does: convert a TXT file holding point-cloud data into a PCD file readable by PCL. A small sample of the resulting file layout is shown below, followed by the full script.
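For reference, the ASCII PCD files produced here have an 11-line header followed by one line per point. A minimal two-point example (the coordinates and packed colour values are made up for illustration):
# .PCD v0.7 - Point Cloud Data file format
VERSION 0.7
FIELDS x y z rgba
SIZE 4 4 4 4
TYPE F F F U
COUNT 1 1 1 1
WIDTH 2
HEIGHT 1
VIEWPOINT 0 0 0 1 0 0 0
POINTS 2
DATA ascii
0.1 0.2 0.3 16711680
-0.4 0.5 -0.6 65280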
'''
This code transforms .txt files into .pcd files,
including the original point cloud.
2019.5.23
'''
import os
import sys
import numpy as np
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(os.path.dirname(BASE_DIR))
sys.path.append(BASE_DIR)
sys.path.append(ROOT_DIR)
ROOT_DIR = os.path.join(ROOT_DIR, 'pointnet(enhence_11)/sem_seg/log11')
colour_map = (255,0,0,
0,255,0,
0,0,255,
156,102,31,
255,192,203,
255,0,255,
0,255,255,
255,255,0,
51,161,201,
128,42,42,
48,128,20,
160,32,240,
255,128,0)
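# Note: colour_map is a flat tuple of 13 RGB triples, one per class, so the colour of
# class k sits at indices 3*k, 3*k+1 and 3*k+2; for example class 2 maps to
# (colour_map[6], colour_map[7], colour_map[8]) == (0, 0, 255), i.e. pure blue.
# The lookups in creat_pred_pcd and creat_gt_pcd below follow this 3*k indexing.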
def creat_real_pcd(input_path, output_path):
    # load data
    Full_Data = np.loadtxt(input_path)
    # create the output file
    if os.path.exists(output_path):
        os.remove(output_path)
    Output_Data = open(output_path, 'a')
    # headers
    Output_Data.write('# .PCD v0.7 - Point Cloud Data file format\nVERSION 0.7\nFIELDS x y z rgba\nSIZE 4 4 4 4\nTYPE F F F U\nCOUNT 1 1 1 1')
    string = '\nWIDTH ' + str(Full_Data.shape[0])
    Output_Data.write(string)
    Output_Data.write('\nHEIGHT 1\nVIEWPOINT 0 0 0 1 0 0 0')
    string = '\nPOINTS ' + str(Full_Data.shape[0])
    Output_Data.write(string)
    Output_Data.write('\nDATA ascii')
    # pack RGB from the r, g, b columns (3, 4, 5)
    for j in range(Full_Data.shape[0]):
        R = Full_Data[j, 3]
        G = Full_Data[j, 4]
        B = Full_Data[j, 5]
        value = (int(R) << 16 | int(G) << 8 | int(B))
        string = ('\n' + str(Full_Data[j, 0]) + ' ' + str(Full_Data[j, 1]) + ' ' + str(Full_Data[j, 2]) + ' ' + str(value))
        Output_Data.write(string)
    Output_Data.close()
def creat_pred_pcd(input_path, output_path):
    # load data
    Full_Data = np.loadtxt(input_path)
    # create the output file
    if os.path.exists(output_path):
        os.remove(output_path)
    Output_Data = open(output_path, 'a')
    # headers
    Output_Data.write('# .PCD v0.7 - Point Cloud Data file format\nVERSION 0.7\nFIELDS x y z rgba\nSIZE 4 4 4 4\nTYPE F F F U\nCOUNT 1 1 1 1')
    string = '\nWIDTH ' + str(Full_Data.shape[0])
    Output_Data.write(string)
    Output_Data.write('\nHEIGHT 1\nVIEWPOINT 0 0 0 1 0 0 0')
    string = '\nPOINTS ' + str(Full_Data.shape[0])
    Output_Data.write(string)
    Output_Data.write('\nDATA ascii')
    # colour each point by its predicted class index (column 7)
    for j in range(Full_Data.shape[0]):
        index = int(Full_Data[j, 7])
        R = colour_map[3 * index]
        G = colour_map[3 * index + 1]
        B = colour_map[3 * index + 2]
        value = (int(R) << 16 | int(G) << 8 | int(B))
        string = ('\n' + str(Full_Data[j, 0]) + ' ' + str(Full_Data[j, 1]) + ' ' + str(Full_Data[j, 2]) + ' ' + str(value))
        Output_Data.write(string)
    Output_Data.close()
def creat_gt_pcd(input_data_path, input_label_path, output_path):
    # load data
    Full_Data = np.loadtxt(input_data_path)
    Label_Data = np.loadtxt(input_label_path)
    # create the output file
    if os.path.exists(output_path):
        os.remove(output_path)
    Output_Data = open(output_path, 'a')
    # headers
    Output_Data.write('# .PCD v0.7 - Point Cloud Data file format\nVERSION 0.7\nFIELDS x y z rgba\nSIZE 4 4 4 4\nTYPE F F F U\nCOUNT 1 1 1 1')
    string = '\nWIDTH ' + str(Full_Data.shape[0])
    Output_Data.write(string)
    Output_Data.write('\nHEIGHT 1\nVIEWPOINT 0 0 0 1 0 0 0')
    string = '\nPOINTS ' + str(Full_Data.shape[0])
    Output_Data.write(string)
    Output_Data.write('\nDATA ascii')
    # colour each point by its ground-truth class index
    for j in range(Full_Data.shape[0]):
        index = int(Label_Data[j])
        R = colour_map[3 * index]
        G = colour_map[3 * index + 1]
        B = colour_map[3 * index + 2]
        value = (int(R) << 16 | int(G) << 8 | int(B))
        string = ('\n' + str(Full_Data[j, 0]) + ' ' + str(Full_Data[j, 1]) + ' ' + str(Full_Data[j, 2]) + ' ' + str(value))
        Output_Data.write(string)
    Output_Data.close()
def creat_err_pcd(input_data_path, input_label_path, output_path):
    # load data
    Full_Data = np.loadtxt(input_data_path)
    Label_Data = np.loadtxt(input_label_path)
    # create the output file
    if os.path.exists(output_path):
        os.remove(output_path)
    Output_Data = open(output_path, 'a')
    # headers
    Output_Data.write('# .PCD v0.7 - Point Cloud Data file format\nVERSION 0.7\nFIELDS x y z rgba\nSIZE 4 4 4 4\nTYPE F F F U\nCOUNT 1 1 1 1')
    string = '\nWIDTH ' + str(Full_Data.shape[0])
    Output_Data.write(string)
    Output_Data.write('\nHEIGHT 1\nVIEWPOINT 0 0 0 1 0 0 0')
    string = '\nPOINTS ' + str(Full_Data.shape[0])
    Output_Data.write(string)
    Output_Data.write('\nDATA ascii')
    # mark wrongly predicted points in red and the rest in light grey
    for j in range(Full_Data.shape[0]):
        index = int(Label_Data[j])
        index1 = int(Full_Data[j, 7])
        if index != index1:
            R, G, B = 255, 0, 0
        else:
            R, G, B = 234, 234, 234
        value = (int(R) << 16 | int(G) << 8 | int(B))
        string = ('\n' + str(Full_Data[j, 0]) + ' ' + str(Full_Data[j, 1]) + ' ' + str(Full_Data[j, 2]) + ' ' + str(value))
        Output_Data.write(string)
    Output_Data.close()
if __name__ == '__main__':
    OUTPUT_PATH_LIST = [os.path.join(ROOT_DIR, line.rstrip()) for line in open(os.path.join(ROOT_DIR, 'output_filelist.txt'))]
    for i in range(len(OUTPUT_PATH_LIST)):
        print('Processing: %d/%d' % (i, len(OUTPUT_PATH_LIST)))
        input_data_path = OUTPUT_PATH_LIST[i]
        input_label_path = (OUTPUT_PATH_LIST[i])[:-8] + 'gt.txt'
        output_real_path = os.path.join(ROOT_DIR, 'PCD_file/epoch_80', os.path.basename(OUTPUT_PATH_LIST[i])[:-8] + 'real.pcd')
        output_pred_path = os.path.join(ROOT_DIR, 'PCD_file/epoch_80', os.path.basename(OUTPUT_PATH_LIST[i])[:-8] + 'pred.pcd')
        output_gt_path = os.path.join(ROOT_DIR, 'PCD_file/epoch_80', os.path.basename(OUTPUT_PATH_LIST[i])[:-8] + 'gt.pcd')
        output_err_path = os.path.join(ROOT_DIR, 'PCD_file/epoch_80', os.path.basename(OUTPUT_PATH_LIST[i])[:-8] + 'err.pcd')
        creat_real_pcd(input_data_path, output_real_path)
        creat_pred_pcd(input_data_path, output_pred_path)
        creat_gt_pcd(input_data_path, input_label_path, output_gt_path)
        creat_err_pcd(input_data_path, input_label_path, output_err_path)
Visualizing all PCD files in a folder in a loop
What the program does: display every PCD file under a folder (including collecting all file names under that folder). A cross-platform sketch of the file-collection step in Python follows the C++ program.
// headers this program needs: Windows directory traversal (io.h) plus PCL I/O, filtering and visualization
#include <io.h>
#include <cstring>
#include <iostream>
#include <string>
#include <vector>
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/console/print.h>
#include <pcl/filters/passthrough.h>
#include <pcl/visualization/pcl_visualizer.h>
using namespace pcl;
using namespace pcl::io;
using namespace pcl::console;
using namespace std;
void getFiles(string path, vector<string>& files)
{
    // file handle (intptr_t so it also works in 64-bit builds)
    intptr_t hFile = 0;
    // file information
    struct _finddata_t fileinfo;
    string p;
    if ((hFile = _findfirst(p.assign(path).append("\\*").c_str(), &fileinfo)) != -1)
    {
        do
        {
            // if it is a directory, recurse into it;
            // otherwise, add it to the list
            if ((fileinfo.attrib & _A_SUBDIR))
            {
                if (strcmp(fileinfo.name, ".") != 0 && strcmp(fileinfo.name, "..") != 0)
                    getFiles(p.assign(path).append("\\").append(fileinfo.name), files);
            }
            else
            {
                files.push_back(p.assign(path).append("\\").append(fileinfo.name));
            }
        } while (_findnext(hFile, &fileinfo) == 0);
        _findclose(hFile);
    }
}
int main()
{
    vector<string> files;
    string filePath = "F:\\S3DIS\\pointnet(enhence_pcd_result)\\find";
    // get all files under this path
    getFiles(filePath, files);
    int size = files.size();
    for (int i = 0; i < size; i++)
    {
        cout << files[i].c_str() << endl;
        // pcl::PointXYZRGB is assumed here to match the "x y z rgba" PCD files generated above
        pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
        pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_filtered_1(new pcl::PointCloud<pcl::PointXYZRGB>);
        //string path = files[i].c_str;
        pcl::io::loadPCDFile(files[i], *cloud);
        // remove stray points of the plant along the main x/y/z directions
        pcl::PassThrough<pcl::PointXYZRGB> pass;
        pass.setInputCloud(cloud);
        pass.setFilterFieldName("y");
        pass.setFilterLimits(0.0, 2.5); // tune this range as needed; 0 is the top and 0.75 the bottom; this is the height setting for the pennywort plant (2018-9-5 15:28:03)
        pass.filter(*cloud_filtered_1);
        //pass.setInputCloud(cloud_filtered_1);
        //pass.setFilterFieldName("x");
        //pass.setFilterLimits(-18, -3);
        //pass.filter(*cloud_filtered_12);
        // create the PCL visualization window (2016-8-6 12:54:27)
        pcl::visualization::PCLVisualizer viewer("PCL Viewer");
        // background colour inside the window; 1.0 corresponds to 255 (2016-8-1 16:40:15)
        viewer.setBackgroundColor(1.0, 1.0, 1.0);
        // show the new point cloud in the window
        viewer.addPointCloud(cloud_filtered_1, "PCL Viewer");
        viewer.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 6, "PCL Viewer");
        //viewer.addCoordinateSystem(1);
        // spin until the viewer window is closed (2016-8-1 17:23:28)
        while (!viewer.wasStopped())
        {
            viewer.spinOnce();
        }
    }
    return 0;
}
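The getFiles routine above relies on the Windows-only _findfirst API from io.h. As a rough cross-platform alternative, not part of the original program, the same file-collection step can be sketched in Python with os.walk; the directory path is only a placeholder.
import os

def get_pcd_files(root):
    # recursively collect the paths of all .pcd files under root
    pcd_files = []
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            if name.lower().endswith('.pcd'):
                pcd_files.append(os.path.join(dirpath, name))
    return pcd_files

# example (placeholder path):
# for p in get_pcd_files('F:/S3DIS/pointnet(enhence_pcd_result)/find'):
#     print(p)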
pcd2txt
What the code does: batch-convert the PCD files in a folder back to TXT by stripping the 11-line ASCII PCD header and keeping only the point rows.
"""
Created on Tue Oct 29 22:43:59 2019
@author: sgl
"""
import os
from os import listdir, path
path_str = '/media/david/pcd' # your directory path
pcds = [f for f in listdir(path_str)
        if f.endswith('.pcd') and path.isfile(path.join(path_str, f))]
for pcd in pcds:
    with open(os.path.join(path_str, pcd), 'r') as f:
        lines = f.readlines()
    # drop the 11-line ASCII PCD header and keep only the point rows
    with open(os.path.join(path_str, os.path.splitext(pcd)[0] + ".txt"), 'w') as f:
        f.write(''.join(lines[11:]))
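A quick check, not part of the original script: the stripped TXT can be loaded with numpy to confirm that only numeric point rows remain (the file name is a placeholder).
import numpy as np

# for the "x y z rgba" PCDs generated earlier this should give an N x 4 array
points = np.loadtxt('/media/david/pcd/example.txt')
print(points.shape)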
References:
https://blog.csdn.net/SGL_LGS/article/details/101382997
https://blog.csdn.net/SGL_LGS/article/details/105890437