基本思想:主要学习一下在共享内存中传递变长的数据,这样在c#调用c++dll也可以雷同操作,以oak的检测和共享内存为代码整合,集成了rapidjson的使用,代码自己摘要和参考吧
CMakeLists.txt
cmake_minimum_required(VERSION 3.16)
project(depthai)

# depthai-core requires at least C++14; the original 11 would fail to build.
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

# OpenCV is used for drawing/display; depthai for the OAK device pipeline.
find_package(OpenCV REQUIRED)
find_package(depthai CONFIG REQUIRED)
#message(STATUS ${OpenCV_INCLUDE_DIRS})

# Header search paths (rapidjson is vendored under include/).
include_directories(${OpenCV_INCLUDE_DIRS})
include_directories(${CMAKE_SOURCE_DIR}/include/rapidjson)
include_directories(${CMAKE_SOURCE_DIR}/include)
include_directories(${CMAKE_SOURCE_DIR}/include/utility)

add_executable(depthai main.cpp Write.cpp Write.h Reader.cpp Reader.h common.h OAK.cpp OAK.h)
# -lpthread is needed for std::thread on Linux.
target_link_libraries(depthai ${OpenCV_LIBS} depthai::opencv -lpthread)
main.cpp
#include "OAK.h"
#include "Reader.h"
#include "Write.h"
// NOTE(review): the <...> arguments of the next includes were lost when the
// post was published; restored to cover what main.cpp actually uses.
#include <cstdio>   // printf
#include <string>   // std::string
#include <thread>   // std::thread
// Producer thread: attach the writer to the shared-memory segment for
// key_id, then run the OAK detection loop which publishes into it.
void oak0(OAK *oak, WriteMem *writemem, int key_id)
{
    int ok = writemem->init_paramter(key_id);
    if (ok != 0) {
        // Bail out: detect() would otherwise write through an unattached
        // (NULL) shared-memory pointer and crash.
        printf("initial write failed\n");
        return;
    }
    printf("initial write successfully\n");
    oak->detect(writemem);
}
// Consumer thread: attach the reader to the shared-memory segment for
// key_id and poll it forever.
// NOTE(review): read_data() returns immediately when no message is pending,
// so this loop busy-spins on one core — TODO add a short sleep or condvar.
void tcp(ReaderMem *readermem, int key_id)
{
    int ok = readermem->init_paramter(key_id);
    if (ok != 0) {
        // Bail out: read_data() would dereference a NULL segment pointer.
        printf("initial reader failed\n");
        return;
    }
    printf("initial reader successfully\n");
    while (true) {
        readermem->read_data();
    }
}
// Wires everything together: one thread runs OAK detection and writes
// results into shared memory, a second thread reads them back out.
int main(int argc, char **argv) {
    // OAK initialization: build the YOLOv4 spatial-detection pipeline.
    // Stack objects replace the original raw new/delete (RAII).
    OAK oak;
    std::string nnPath("../frozen_darknet_yolov4_model.blob");
    int key_id = 3546; // System V IPC key shared by writer and reader
    oak.initial(nnPath);

    WriteMem writemem;
    ReaderMem readermem;
    std::thread thread1(oak0, &oak, &writemem, key_id);
    std::thread thread2(tcp, &readermem, key_id);

    // NOTE(review): tcp() loops forever, so thread2.join() never returns and
    // the cleanup below is unreachable in practice — TODO add a stop flag.
    thread1.join();
    thread2.join();
    writemem.clean_data();
    readermem.clean_data();
    return 0;
}
common.h
//
// Created by ubuntu on 2022/11/15.
//
#ifndef DEPTHAI_COMMON_H
#define DEPTHAI_COMMON_H

#include <cstring> // memset — previously pulled in only transitively

#include "rapidjson/document.h"
#include "rapidjson/writer.h"
#include "rapidjson/stringbuffer.h"

// NOTE(review): using-directive in a shared header leaks into every
// includer; kept because other files rely on it.
using namespace rapidjson;

// Must stay token-identical to the redefinitions in Reader.h / Write.h
// (identical macro redefinition is legal; a different one is an error).
#define SHARE_MEMORY_BUFFER_LEN 1024
// Parenthesized so the macro expands safely inside larger expressions.
#define IMAGE_SIZE (3*416*416)

// Fixed-layout control block placed in a System V shared-memory segment.
// Writer fills chBuffer/pixel and raises iSignal; reader consumes and
// clears it (single-producer / single-consumer handshake).
struct stuShareMemory {
    int iSignal;                          // 1 = message pending, 0 = consumed
    char chBuffer[SHARE_MEMORY_BUFFER_LEN]; // JSON payload (variable length)
    int chLength;                         // valid bytes in chBuffer
    char pixel[IMAGE_SIZE];               // raw BGR frame bytes
    int pixel_rows;
    int pixel_cols;
    int pixel_channels;
    stuShareMemory() {
        iSignal = 0;
        chLength = 0;
        pixel_rows = 0;
        pixel_cols = 0;
        pixel_channels = 0;
        memset(chBuffer, 0, SHARE_MEMORY_BUFFER_LEN);
        memset(pixel, 0, IMAGE_SIZE);
    };
};
#endif //DEPTHAI_COMMON_H
OAK.h
#ifndef DEPTHAI_OAK_H
#define DEPTHAI_OAK_H

// NOTE(review): the template/include arguments below were lost when the
// post was published; restored from the depthai spatial-detection example.
#include <atomic>
#include <chrono>
#include <memory>
#include <string>
#include <vector>

#include "utility.hpp"
#include "depthai/depthai.hpp"
#include "Write.h"
#include "rapidjson/document.h"
#include "rapidjson/writer.h"
#include "rapidjson/stringbuffer.h"

// NOTE(review): using-directives in a header leak into every includer;
// kept because OAK.cpp relies on them (steady_clock, to_string, ...).
using namespace rapidjson;
using namespace std;
using namespace std::chrono;

// Wraps an OAK-D camera: initial() builds a YOLOv4 spatial-detection
// pipeline, detect() streams detections (as JSON) plus the RGB frame
// into shared memory via WriteMem.
class OAK {
public:
    OAK();
    ~OAK();

public:
    // Builds the depthai pipeline (RGB cam, stereo depth, YOLO spatial
    // network, XLink outputs). nnPath: path to the compiled .blob model.
    int initial(std::string nnPath);
    // Runs the detection loop until 'q'/'Q' is pressed; publishes each
    // frame's results through writemem. Returns 0.
    int detect(WriteMem *writemem);

private:
    // Create pipeline
    dai::Pipeline pipeline;
    std::atomic<bool> syncNN{true}; // link NN passthrough (synced) vs raw preview
    std::shared_ptr<dai::DataOutputQueue> previewQueue;
    std::shared_ptr<dai::DataOutputQueue> detectionNNQueue;
    std::shared_ptr<dai::DataOutputQueue> xoutBoundingBoxDepthMappingQueue;
    std::shared_ptr<dai::DataOutputQueue> depthQueue;
    std::shared_ptr<dai::DataOutputQueue> networkQueue;
    cv::Scalar color = cv::Scalar(255, 255, 255);
    bool printOutputLayersOnce = true;
    // COCO class names, indexed by detection.label.
    const std::vector<std::string> labelMap = {
        "person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck", "boat",
        "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse",
        "sheep", "cow", "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag",
        "tie", "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove",
        "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon",
        "bowl", "banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza",
        "donut", "cake", "chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor",
        "laptop", "mouse", "remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink",
        "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"};

private:
    WriteMem *write; // unused; kept for interface stability
};
#endif //DEPTHAI_OAK_H
OAK.cpp
#include "OAK.h"
// Construction does nothing; the pipeline is assembled in initial().
OAK::OAK() {
}
// Members (pipeline, queues) clean up via their own destructors.
OAK::~OAK() {
}
// Builds the YOLOv4 spatial-detection pipeline: RGB preview feeds the
// network, stereo depth is aligned to RGB, and five XLink streams are
// exposed (rgb, detections, boundingBoxDepthMapping, depth, nnNetwork).
// Returns 0; the device itself is opened later in detect().
int OAK::initial(std::string nnPath) {
    // Define sources and outputs.
    // NOTE(review): pipeline.create<>() template arguments were lost in the
    // published post; restored from the depthai spatial_tiny_yolo example.
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto spatialDetectionNetwork = pipeline.create<dai::node::YoloSpatialDetectionNetwork>();
    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
    auto monoRight = pipeline.create<dai::node::MonoCamera>();
    auto stereo = pipeline.create<dai::node::StereoDepth>();
    auto xoutRgb = pipeline.create<dai::node::XLinkOut>();
    auto xoutNN = pipeline.create<dai::node::XLinkOut>();
    auto xoutBoundingBoxDepthMapping = pipeline.create<dai::node::XLinkOut>();
    auto xoutDepth = pipeline.create<dai::node::XLinkOut>();
    auto nnNetworkOut = pipeline.create<dai::node::XLinkOut>();

    xoutRgb->setStreamName("rgb");
    xoutNN->setStreamName("detections");
    xoutBoundingBoxDepthMapping->setStreamName("boundingBoxDepthMapping");
    xoutDepth->setStreamName("depth");
    nnNetworkOut->setStreamName("nnNetwork");

    // Properties: 416x416 preview matches the YOLO input size.
    camRgb->setPreviewSize(416, 416);
    camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_1080_P);
    camRgb->setInterleaved(false);
    camRgb->setColorOrder(dai::ColorCameraProperties::ColorOrder::BGR);

    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoLeft->setBoardSocket(dai::CameraBoardSocket::LEFT);
    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

    // Setting node configs.
    stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::HIGH_DENSITY);
    // Align depth map to the perspective of RGB camera, on which inference is done.
    stereo->setDepthAlign(dai::CameraBoardSocket::RGB);
    stereo->setOutputSize(monoLeft->getResolutionWidth(), monoLeft->getResolutionHeight());

    spatialDetectionNetwork->setBlobPath(nnPath);
    spatialDetectionNetwork->setConfidenceThreshold(0.5f);
    spatialDetectionNetwork->input.setBlocking(false);
    spatialDetectionNetwork->setBoundingBoxScaleFactor(0.5);
    spatialDetectionNetwork->setDepthLowerThreshold(100);  // mm
    spatialDetectionNetwork->setDepthUpperThreshold(5000); // mm

    // YOLO-specific parameters (tiny-YOLOv4, 80 COCO classes).
    spatialDetectionNetwork->setNumClasses(80);
    spatialDetectionNetwork->setCoordinateSize(4);
    spatialDetectionNetwork->setAnchors({10, 14, 23, 27, 37, 58, 81, 82, 135, 169, 344, 319});
    spatialDetectionNetwork->setAnchorMasks({{"side26", {1, 2, 3}},
                                             {"side13", {3, 4, 5}}});
    spatialDetectionNetwork->setIouThreshold(0.5f);

    // Linking.
    monoLeft->out.link(stereo->left);
    monoRight->out.link(stereo->right);
    camRgb->preview.link(spatialDetectionNetwork->input);
    if (syncNN) {
        // Frames come out in lock-step with their detections.
        spatialDetectionNetwork->passthrough.link(xoutRgb->input);
    } else {
        camRgb->preview.link(xoutRgb->input);
    }
    spatialDetectionNetwork->out.link(xoutNN->input);
    spatialDetectionNetwork->boundingBoxMapping.link(xoutBoundingBoxDepthMapping->input);
    stereo->depth.link(spatialDetectionNetwork->inputDepth);
    spatialDetectionNetwork->passthroughDepth.link(xoutDepth->input);
    spatialDetectionNetwork->outNetwork.link(nnNetworkOut->input);

    // Connecting to the device and starting the pipeline happens in detect().
    return 0;
}
// Detection loop: pulls synced RGB/detections/depth packets, serializes
// "person" detections (label 0) into a JSON array, and publishes the JSON
// plus the annotated frame to shared memory via writemem.
// Blocks until the user presses 'q'/'Q' in the OpenCV window. Returns 0.
int OAK::detect(WriteMem *writemem) {
    // Connect to device and start the pipeline built in initial().
    dai::Device device(pipeline);

    // Output queues will be used to get the rgb frames and nn data from the outputs defined above.
    previewQueue = device.getOutputQueue("rgb", 4, false);
    detectionNNQueue = device.getOutputQueue("detections", 4, false);
    xoutBoundingBoxDepthMappingQueue = device.getOutputQueue("boundingBoxDepthMapping", 4, false);
    depthQueue = device.getOutputQueue("depth", 4, false);
    networkQueue = device.getOutputQueue("nnNetwork", 4, false);

    while (true) {
        // Per-iteration JSON array; the Document owns its allocator, so all
        // JSON memory is released at end of scope (no manual Clear needed).
        rapidjson::Document doc;
        doc.SetArray();
        rapidjson::Document::AllocatorType &allocator = doc.GetAllocator();

        // NOTE(review): get<>() template arguments were lost in the published
        // post; restored from the depthai spatial-detection example.
        auto imgFrame = previewQueue->get<dai::ImgFrame>();
        auto inDet = detectionNNQueue->get<dai::SpatialImgDetections>();
        auto depth = depthQueue->get<dai::ImgFrame>();
        auto inNN = networkQueue->get<dai::NNData>();

        // One-time debug dump of the network's output layer names.
        if (printOutputLayersOnce && inNN) {
            std::cout << "Output layer names: ";
            for (const auto &ten : inNN->getAllLayerNames()) {
                std::cout << ten << ", ";
            }
            std::cout << std::endl;
            printOutputLayersOnce = false;
        }

        cv::Mat frame = imgFrame->getCvFrame();
        cv::Mat depthFrame = depth->getFrame(); // depthFrame values are in millimeters
        cv::Mat depthFrameColor;
        cv::normalize(depthFrame, depthFrameColor, 255, 0, cv::NORM_INF, CV_8UC1);
        cv::equalizeHist(depthFrameColor, depthFrameColor);
        cv::applyColorMap(depthFrameColor, depthFrameColor, cv::COLORMAP_HOT);

        auto detections = inDet->detections;
        if (!detections.empty()) {
            // Draw the depth-ROI boxes on the colorized depth map.
            auto boundingBoxMapping = xoutBoundingBoxDepthMappingQueue->get<dai::SpatialLocationCalculatorConfig>();
            auto roiDatas = boundingBoxMapping->getConfigData();
            for (auto roiData : roiDatas) {
                auto roi = roiData.roi;
                roi = roi.denormalize(depthFrameColor.cols, depthFrameColor.rows);
                auto topLeft = roi.topLeft();
                auto bottomRight = roi.bottomRight();
                auto xmin = (int) topLeft.x;
                auto ymin = (int) topLeft.y;
                auto xmax = (int) bottomRight.x;
                auto ymax = (int) bottomRight.y;
                // NOTE(review): passes a font enum as the thickness argument
                // (as in the upstream example) — likely a typo; TODO use an
                // explicit thickness.
                cv::rectangle(depthFrameColor, cv::Rect(cv::Point(xmin, ymin), cv::Point(xmax, ymax)), color,
                              cv::FONT_HERSHEY_SIMPLEX);
            }
        }

        for (const auto &detection : detections) {
            // Denormalize the [0,1] box coordinates to pixel space.
            int x1 = detection.xmin * frame.cols;
            int y1 = detection.ymin * frame.rows;
            int x2 = detection.xmax * frame.cols;
            int y2 = detection.ymax * frame.rows;
            uint32_t labelIndex = detection.label;
            // Only COCO class 0 ("person") is serialized and drawn.
            if (labelIndex == 0) {
                rapidjson::Document item_doc;
                item_doc.SetObject();
                std::string labelStr = to_string(labelIndex);
                if (labelIndex < labelMap.size()) {
                    labelStr = labelMap[labelIndex];
                }
                // Coordinates are stored as doubles (the *1.0) so the reader's
                // float-based parsing succeeds.
                item_doc.AddMember("x1", x1 * 1.0, allocator);
                item_doc.AddMember("y1", y1 * 1.0, allocator);
                item_doc.AddMember("x2", x2 * 1.0, allocator);
                item_doc.AddMember("y2", y2 * 1.0, allocator);
                item_doc.AddMember("c", detection.confidence * 1.0, allocator);
                item_doc.AddMember("d", (int) detection.spatialCoordinates.z * 1.0, allocator); // depth, mm
                doc.PushBack(item_doc, allocator);

                // Annotate the frame: label, confidence, XYZ distance in mm.
                cv::putText(frame, labelStr, cv::Point(x1 + 10, y1 + 20), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                std::stringstream confStr;
                confStr << std::fixed << std::setprecision(2) << detection.confidence * 100;
                cv::putText(frame, confStr.str(), cv::Point(x1 + 10, y1 + 35), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                std::stringstream depthX;
                depthX << "X: " << (int) detection.spatialCoordinates.x << " mm";
                cv::putText(frame, depthX.str(), cv::Point(x1 + 10, y1 + 50), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                std::stringstream depthY;
                depthY << "Y: " << (int) detection.spatialCoordinates.y << " mm";
                cv::putText(frame, depthY.str(), cv::Point(x1 + 10, y1 + 65), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                std::stringstream depthZ;
                depthZ << "Z: " << (int) detection.spatialCoordinates.z << " mm";
                cv::putText(frame, depthZ.str(), cv::Point(x1 + 10, y1 + 80), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                cv::rectangle(frame, cv::Rect(cv::Point(x1, y1), cv::Point(x2, y2)), color, cv::FONT_HERSHEY_SIMPLEX);
            }
        }

        // Serialize and publish: JSON array + raw frame go to shared memory.
        rapidjson::StringBuffer buffer;
        rapidjson::Writer<rapidjson::StringBuffer> write_json(buffer);
        doc.Accept(write_json);
        std::string buf_json_str = buffer.GetString();
        writemem->write_data(buf_json_str, frame);

        // cv::imshow("depth", depthFrameColor);
        cv::imshow("rgb", frame);
        int key = cv::waitKey(1);
        if (key == 'q' || key == 'Q') {
            return 0;
        }
    }
    return 0;
}
Reader.h
#ifndef DEPTHAI_READER_H
#define DEPTHAI_READER_H

// NOTE(review): the <...> include arguments were lost when the post was
// published; restored with the System V shared-memory headers this file uses.
#include <sys/types.h>
#include <sys/ipc.h>
#include <sys/shm.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include "iostream"
#include "common.h"
#include "opencv2/core.hpp"
#include "opencv2/highgui.hpp"

// Must stay token-identical to the definition in common.h
// (identical macro redefinition is legal; a different one is an error).
#define SHARE_MEMORY_BUFFER_LEN 1024

// Consumer side of the shared-memory channel: polls the segment, parses
// the JSON detection array, and displays the transferred frame.
class ReaderMem {
public:
    ReaderMem();
    ~ReaderMem();

public:
    // Attaches to (creating if needed) the System V segment for key_id.
    // Returns 0 on success, -1 on failure.
    int init_paramter(int key_id);
    // Consumes one pending message if iSignal is set: prints the JSON
    // fields, shows the image, then clears the flag. Returns 0.
    int read_data();
    // Detaches the mapping and marks the segment for removal (IPC_RMID).
    void clean_data();

private:
    rapidjson::Document document;      // reused parse buffer
    rapidjson::Value DetectObj;        // unused; kept for interface stability
    struct stuShareMemory *stu = NULL; // view of the mapped segment
    void *shm = NULL;                  // raw shmat() address
    struct Msg *msg;                   // unused; kept for interface stability
    int shmid = -1;                    // segment id, needed for IPC_RMID
};
#endif //DEPTHAI_READER_H
Reader.cpp
#include "Reader.h"
// Construction does nothing; call init_paramter() before use.
ReaderMem::ReaderMem(){}
// Does not detach/remove the segment; call clean_data() explicitly.
ReaderMem::~ReaderMem(){}
// Attach to (creating if needed) the System V shared-memory segment keyed
// by key_id and remember both the segment id and the mapped control block.
// Returns 0 on success, -1 if shmget/shmat fails.
int ReaderMem::init_paramter(int key_id) {
    printf("ReaderMem %d\n", key_id);
    // Create-or-open a segment large enough for one control block.
    shmid = shmget((key_t)key_id, sizeof(struct stuShareMemory), 0666 | IPC_CREAT);
    if (shmid == -1) {
        printf("shmget err.\n");
        return -1;
    }
    // Map the segment at a system-chosen address; (void*)-1 signals failure.
    shm = shmat(shmid, nullptr, 0);
    if (shm == (void*)-1) {
        printf("shmat err.\n");
        return -1;
    }
    stu = static_cast<struct stuShareMemory*>(shm);
    return 0;
}
int ReaderMem::read_data() {
if(stu->iSignal != 0)
{
std::string acceptMem=stu->chBuffer;
std::string newMem=acceptMem.substr(0,stu->chLength);
printf("-----------reader------------ \n%s\n",newMem.c_str());
cv::Mat cvoutImg = cv::Mat(stu->pixel_rows,stu->pixel_cols,CV_8UC3,cv::Scalar(255, 255, 255));//bufHight,bufWidth
memcpy((char*)cvoutImg.data, stu->pixel,stu->pixel_rows*stu->pixel_cols*stu->pixel_channels);
cv::imshow("read",cvoutImg);
cv::waitKey(1);
if (!document.Parse(newMem.c_str()).HasParseError()) {
rapidjson::Value& json_array = document;
for (rapidjson::SizeType i = 0; i < json_array.Size(); i++)
{
rapidjson::Value& json_obj = json_array[i];
if (json_obj.IsObject())
{
if (json_obj.HasMember("x1"))
{
rapidjson::Value& json = json_obj["x1"];
if (json.IsFloat())
{
float x1 = json.GetFloat();
printf("%.2f\n",x1);
}
json = json_obj["y1"];
if (json.IsFloat())
{
float y1 = json.GetFloat();
printf("%.2f\n",y1);
}
json = json_obj["x2"];
if (json.IsFloat())
{
float x2 = json.GetFloat();
printf("%.2f\n",x2);
}
json = json_obj["c"];
if (json.IsFloat())
{
float c = json.GetFloat();
printf("%.2f\n",c);
}
json = json_obj["d"];
if (json.IsFloat())
{
float d = json.GetFloat();
printf("%.2f\n",d);
}
}
}
}
}
stu->iSignal = 0;
}
return 0;
}
// Detach our mapping, then mark the segment for removal. The reader owns
// teardown; WriteMem::clean_data only detaches.
void ReaderMem::clean_data() {
// detach first — IPC_RMID only destroys the segment once all attaches are gone
shmdt(shm);
shmctl(shmid, IPC_RMID, 0);
}
Write.h
#ifndef DEPTHAI_WRITE_H
#define DEPTHAI_WRITE_H

// NOTE(review): the <...> include arguments were lost when the post was
// published; restored with the System V shared-memory headers this file uses.
#include <sys/types.h>
#include <sys/ipc.h>
#include <sys/shm.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include "iostream"

// NOTE(review): using-directive in a header leaks into every includer;
// kept because Write.cpp relies on it (endl).
using namespace std;

// Must stay token-identical to the definition in common.h
// (identical macro redefinition is legal; a different one is an error).
#define SHARE_MEMORY_BUFFER_LEN 1024
#include "common.h"
#include "opencv2/core.hpp"
#include "opencv2/highgui.hpp"

// Producer side of the shared-memory channel: publishes a JSON string and
// the raw frame pixels, then raises iSignal for the reader.
class WriteMem {
public:
    WriteMem();
    ~WriteMem();

public:
    // Attaches to (creating if needed) the System V segment for key_id.
    // Returns 0 on success, -1 on failure.
    int init_paramter(int key_id);
    // Copies content and frame's pixels into shared memory and raises
    // iSignal; drops the message if the reader hasn't consumed the
    // previous one. Returns 0.
    int write_data(std::string content, cv::Mat frame);
    // Detaches the mapping (segment removal is done by ReaderMem).
    void clean_data();

private:
    struct stuShareMemory *stu = NULL; // view of the mapped segment
    void *shm = NULL;                  // raw shmat() address
    struct Msg *msg;                   // unused; kept for interface stability
};
#endif //DEPTHAI_WRITE_H
Write.cpp
#include "Write.h"
// Construction does nothing; call init_paramter() before use.
WriteMem::WriteMem() {}
// Does not detach the segment; call clean_data() explicitly.
WriteMem::~WriteMem() {}
// Attach to (creating if needed) the System V shared-memory segment keyed
// by key_id, map it, and reset the handshake flag so the writer starts.
// Returns 0 on success, -1 if shmget/shmat fails.
int WriteMem::init_paramter(int key_id) {
printf("WriteMem %d\n",key_id);
// NOTE(review): shmid is a local and is discarded, so this class can never
// issue IPC_RMID itself — ReaderMem::clean_data removes the segment instead.
int shmid = shmget((key_t) key_id, sizeof(struct stuShareMemory), 0666 | IPC_CREAT);
if (shmid == -1) {
printf("shmget err.\n");
return -1;
}
// Map the segment at a system-chosen address; (void*)-1 signals failure.
shm = shmat(shmid, (void *) 0, 0);
if (shm == (void *) -1) {
printf("shmat err.\n");
return -1;
}
stu = (struct stuShareMemory *) shm;
// 0 = buffer free; the reader sets this back to 0 after each message.
stu->iSignal = 0;
return 0;
}
// Publish one message: copy the JSON string and the frame's raw pixels
// into the shared segment, then raise iSignal. If the reader hasn't
// consumed the previous message yet (iSignal == 1) this frame is dropped
// (best-effort, non-blocking). Returns 0.
int WriteMem::write_data(std::string content, cv::Mat frame) {
    printf("-----------write------------\n %s \n", content.c_str());
    if (stu->iSignal != 1) {
        // FIX(review): the original memcpy'd content.size() bytes unchecked —
        // a JSON payload longer than the fixed buffer overflowed the shared
        // segment. Clamp, and terminate so the reader's string ctor is safe.
        size_t len = content.size();
        if (len > SHARE_MEMORY_BUFFER_LEN - 1) {
            len = SHARE_MEMORY_BUFFER_LEN - 1;
        }
        stu->chLength = (int)len;
        memcpy(stu->chBuffer, content.c_str(), len);
        stu->chBuffer[len] = '\0';

        // Copy the raw pixel data, clamped to the shared buffer size.
        stu->pixel_rows = frame.rows;
        stu->pixel_cols = frame.cols;
        stu->pixel_channels = frame.channels();
        size_t bytes = (size_t)frame.rows * frame.cols * frame.channels();
        if (bytes > IMAGE_SIZE) {
            bytes = IMAGE_SIZE;
        }
        memcpy(stu->pixel, (const char*)frame.data, bytes);

        stu->iSignal = 1; // publish: set the flag only after the payload
    }
    return 0;
}
// Detach the writer's mapping. The segment itself is left in place;
// ReaderMem::clean_data issues IPC_RMID.
void WriteMem::clean_data() {
shmdt(shm);
std::cout << "end progress." << endl;
}
测试数据
-----------write------------
[{"x1":30.0,"y1":266.0,"x2":125.0,"y2":414.0,"c":0.7772049903869629,"d":1530.0},{"x1":154.0,"y1":254.0,"x2":249.0,"y2":332.0,"c":0.6232306957244873,"d":3709.0},{"x1":0.0,"y1":309.0,"x2":38.0,"y2":415.0,"c":0.5261836051940918,"d":3442.0}]
-----------reader------------
[{"x1":30.0,"y1":266.0,"x2":125.0,"y2":414.0,"c":0.7772049903869629,"d":1530.0},{"x1":154.0,"y1":254.0,"x2":249.0,"y2":332.0,"c":0.6232306957244873,"d":3709.0},{"x1":0.0,"y1":309.0,"x2":38.0,"y2":415.0,"c":0.5261836051940918,"d":3442.0}]
30.00
266.00
125.00
0.78
1530.00
154.00
254.00
249.00
0.62
3709.00
0.00
309.00
38.00
0.53
3442.00
-----------write------------
[{"x1":29.0,"y1":267.0,"x2":121.0,"y2":415.0,"c":0.8682379722595215,"d":1765.0},{"x1":156.0,"y1":254.0,"x2":252.0,"y2":333.0,"c":0.605661153793335,"d":3693.0}]
-----------reader------------
[{"x1":29.0,"y1":267.0,"x2":121.0,"y2":415.0,"c":0.8682379722595215,"d":1765.0},{"x1":156.0,"y1":254.0,"x2":252.0,"y2":333.0,"c":0.605661153793335,"d":3693.0}]
29.00
267.00
121.00
0.87
1765.00
156.00
254.00
252.00
0.61
3693.00
参考:
OpenCV CEO教你用OAK(四):创建复杂的管道 - 知乎