首页 > 系统相关 >48、OAK通过共享内存传递变长结构体(Rapidjson)进行数据和图片交互

48、OAK通过共享内存传递变长结构体(Rapidjson)进行数据和图片交互

时间:2022-11-21 11:03:41浏览次数:72  
标签:std 共享内存 48 int auto OAK json include cv


基本思想:主要学习一下在共享内存中传递变长的数据,这样在c#调用c++dll也可以雷同操作,以oak的检测和共享内存为代码整合,集成了rapidjson的使用,代码自己摘要和参考吧

cmakelists.txt

cmake_minimum_required(VERSION 3.16)
project(depthai)
set(CMAKE_CXX_STANDARD 11)
find_package(OpenCV REQUIRED)
#message(STATUS ${OpenCV_INCLUDE_DIRS})
# Add header search paths (OpenCV + bundled rapidjson/utility headers)
include_directories(${OpenCV_INCLUDE_DIRS})
include_directories(${CMAKE_SOURCE_DIR}/include/rapidjson)
include_directories(${CMAKE_SOURCE_DIR}/include)
include_directories(${CMAKE_SOURCE_DIR}/include/utility)
# Locate the depthai SDK; OpenCV and depthai libs are linked below
find_package(depthai CONFIG REQUIRED)
add_executable(depthai main.cpp Write.cpp Write.h Reader.cpp Reader.h common.h OAK.cpp OAK.h)
target_link_libraries(depthai ${OpenCV_LIBS} depthai::opencv -lpthread)

main.cpp

#include "OAK.h"
#include <iostream>
#include <string>
#include <thread>
#include "Reader.h"
#include "Write.h"

// Writer-side thread body: attach the shared-memory segment identified by
// key_id, then run the OAK detection loop, which publishes JSON + frames
// through writemem until the user quits.
void oak0(OAK *oak, WriteMem *writemem, int key_id)
{
    if (writemem->init_paramter(key_id) == 0) {
        printf("initial write successfully\n");
    }
    oak->detect(writemem);
}


// Reader-side thread body: attach the same shared-memory segment, then poll
// it forever, consuming and printing whatever the writer publishes.
// Note: this loop never returns.
void tcp(ReaderMem *readermem, int key_id)
{
    if (readermem->init_paramter(key_id) == 0) {
        printf("initial reader successfully\n");
    }
    for (;;) {
        readermem->read_data();
    }
}

// Entry point: build the OAK pipeline, then run writer (detection) and
// reader (shared-memory consumer) on two threads against the same
// System V shm key.
int main(int argc, char **argv) {
    // OAK initialization; blob path is fixed relative to the build dir.
    OAK oak;
    std::string nnPath("../frozen_darknet_yolov4_model.blob");

    const int key_id = 3546;
    oak.initial(nnPath);

    // Stack-allocated endpoints: the original used raw new/delete; RAII
    // gives the same behavior without manual cleanup or leak risk.
    WriteMem writemem;
    ReaderMem readermem;

    std::thread thread1(oak0, &oak, &writemem, key_id);
    std::thread thread2(tcp, &readermem, key_id);

    thread1.join();
    // NOTE(review): tcp() loops forever, so this join never returns and the
    // cleanup below is unreachable in practice — confirm intended shutdown
    // (e.g. add a stop flag if clean_data() must actually run).
    thread2.join();

    writemem.clean_data();
    readermem.clean_data();

    return 0;
}

common.h

//
// Created by ubuntu on 2022/11/15.
//

#ifndef DEPTHAI_COMMON_H
#define DEPTHAI_COMMON_H

#include "rapidjson/document.h"
#include "rapidjson/writer.h"
#include "rapidjson/stringbuffer.h"

using namespace rapidjson;

// Maximum length of the JSON payload stored in shared memory.
#define SHARE_MEMORY_BUFFER_LEN 1024
// Maximum image payload: 3 channels * 416x416 (the OAK preview size).
// Parenthesized so the macro expands safely inside expressions.
#define IMAGE_SIZE (3*416*416)

// Fixed-size record exchanged through System V shared memory.
// iSignal is a crude handshake flag: the writer sets it to 1 when fresh
// data is available; the reader sets it back to 0 after consuming.
// NOTE: the segment obtained via shmat() is cast to this type without
// running the constructor — the writer/reader rely on the kernel
// zero-filling new segments.
struct stuShareMemory {
    int iSignal;                                // 0 = consumed/empty, 1 = new data ready
    char chBuffer[SHARE_MEMORY_BUFFER_LEN]{};   // JSON payload; not NUL-terminated, use chLength
    int chLength;                               // valid byte count in chBuffer
    char pixel[IMAGE_SIZE]{};                   // raw interleaved image bytes
    int pixel_rows;                             // image height
    int pixel_cols;                             // image width
    int pixel_channels;                         // channels per pixel (3 for BGR)

    // Zero-initialize every field. The array NSDMIs above zero the buffers,
    // so this header no longer depends on memset/<cstring> being pulled in
    // transitively (the original called memset without including it here).
    stuShareMemory()
        : iSignal(0), chLength(0), pixel_rows(0), pixel_cols(0), pixel_channels(0) {}
};


#endif //DEPTHAI_COMMON_H

OAK.h

#ifndef DEPTHAI_OAK_H
#define DEPTHAI_OAK_H
#include <chrono>

#include "utility.hpp"

#include "depthai/depthai.hpp"
#include "Write.h"

#include "rapidjson/document.h"
#include "rapidjson/writer.h"
#include "rapidjson/stringbuffer.h"
using namespace rapidjson;
using namespace std;
using namespace std::chrono;



// Wraps an OAK-D device pipeline: RGB preview + stereo depth feeding a
// YOLOv4 spatial detection network. initial() builds the pipeline graph;
// detect() opens the device and streams results into shared memory.
class OAK {
public:
    OAK();

    ~OAK();

public:
    // Build the depthai pipeline; nnPath is the compiled .blob model file.
    // Returns 0 (no failure path in the visible implementation).
    int initial(std::string nnPath);
    // Blocking detection loop: publishes JSON detections + the preview
    // frame through writemem until 'q'/'Q' is pressed in the preview
    // window. Returns 0.
    int detect(WriteMem *writemem);

private:
    // Create pipeline
    dai::Pipeline pipeline;
    // When true, the preview output is the NN passthrough (frame-synced
    // with detections); otherwise the raw camera preview is streamed.
    std::atomic<bool> syncNN{true};
    // Device output queues, populated in detect() after the device opens.
    std::shared_ptr<dai::DataOutputQueue> previewQueue;
    std::shared_ptr<dai::DataOutputQueue> detectionNNQueue;
    std::shared_ptr<dai::DataOutputQueue> xoutBoundingBoxDepthMappingQueue;
    std::shared_ptr<dai::DataOutputQueue> depthQueue;
    std::shared_ptr<dai::DataOutputQueue> networkQueue;
    cv::Scalar color = cv::Scalar(255, 255, 255);   // box/overlay color (white)
    bool printOutputLayersOnce = true;              // log NN layer names once
    // COCO class names; index must match the model's label ids.
    const std::vector<std::string> labelMap = {
        "person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck", "boat",
        "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse",
        "sheep", "cow", "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag",
        "tie", "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove",
        "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon",
        "bowl", "banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza",
        "donut", "cake", "chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor",
        "laptop", "mouse", "remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink",
        "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"};

private:
    // NOTE(review): never assigned in the visible code — confirm it can be removed.
    WriteMem *write;

};


#endif //DEPTHAI_OAK_H

OAK.cpp

#include "OAK.h"


// All real setup happens in initial(), so the special members have
// nothing to do.
OAK::OAK() = default;

OAK::~OAK() = default;

// Build the depthai pipeline graph (nodes + links) without opening the
// device. nnPath: path to the compiled YOLOv4 .blob. Always returns 0.
int OAK::initial(std::string nnPath) {

    // Define sources and outputs
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto spatialDetectionNetwork = pipeline.create<dai::node::YoloSpatialDetectionNetwork>();
    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
    auto monoRight = pipeline.create<dai::node::MonoCamera>();
    auto stereo = pipeline.create<dai::node::StereoDepth>();

    // XLink outputs: these stream names must match the queue names used
    // in detect().
    auto xoutRgb = pipeline.create<dai::node::XLinkOut>();
    auto xoutNN = pipeline.create<dai::node::XLinkOut>();
    auto xoutBoundingBoxDepthMapping = pipeline.create<dai::node::XLinkOut>();
    auto xoutDepth = pipeline.create<dai::node::XLinkOut>();
    auto nnNetworkOut = pipeline.create<dai::node::XLinkOut>();

    xoutRgb->setStreamName("rgb");
    xoutNN->setStreamName("detections");
    xoutBoundingBoxDepthMapping->setStreamName("boundingBoxDepthMapping");
    xoutDepth->setStreamName("depth");
    nnNetworkOut->setStreamName("nnNetwork");

    // Properties
    // 416x416 preview matches the YOLO input resolution (and IMAGE_SIZE
    // in common.h).
    camRgb->setPreviewSize(416, 416);
    camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_1080_P);
    camRgb->setInterleaved(false);
    camRgb->setColorOrder(dai::ColorCameraProperties::ColorOrder::BGR);

    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoLeft->setBoardSocket(dai::CameraBoardSocket::LEFT);
    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

    // setting node configs
    stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::HIGH_DENSITY);
    // Align depth map to the perspective of RGB camera, on which inference is done
    stereo->setDepthAlign(dai::CameraBoardSocket::RGB);
    stereo->setOutputSize(monoLeft->getResolutionWidth(), monoLeft->getResolutionHeight());

    // Detection network config: depth thresholds are in millimeters.
    spatialDetectionNetwork->setBlobPath(nnPath);
    spatialDetectionNetwork->setConfidenceThreshold(0.5f);
    spatialDetectionNetwork->input.setBlocking(false);
    spatialDetectionNetwork->setBoundingBoxScaleFactor(0.5);
    spatialDetectionNetwork->setDepthLowerThreshold(100);
    spatialDetectionNetwork->setDepthUpperThreshold(5000);

    // yolo specific parameters
    spatialDetectionNetwork->setNumClasses(80);
    spatialDetectionNetwork->setCoordinateSize(4);
    spatialDetectionNetwork->setAnchors({10, 14, 23, 27, 37, 58, 81, 82, 135, 169, 344, 319});
    spatialDetectionNetwork->setAnchorMasks({{"side26", {1, 2, 3}},
                                             {"side13", {3, 4, 5}}});
    spatialDetectionNetwork->setIouThreshold(0.5f);

    // Linking
    monoLeft->out.link(stereo->left);
    monoRight->out.link(stereo->right);

    camRgb->preview.link(spatialDetectionNetwork->input);
    // syncNN: stream the NN passthrough so preview frames stay in sync
    // with their detections; otherwise stream the raw preview.
    if (syncNN) {
        spatialDetectionNetwork->passthrough.link(xoutRgb->input);
    } else {
        camRgb->preview.link(xoutRgb->input);
    }

    spatialDetectionNetwork->out.link(xoutNN->input);
    spatialDetectionNetwork->boundingBoxMapping.link(xoutBoundingBoxDepthMapping->input);

    stereo->depth.link(spatialDetectionNetwork->inputDepth);
    spatialDetectionNetwork->passthroughDepth.link(xoutDepth->input);
    spatialDetectionNetwork->outNetwork.link(nnNetworkOut->input);
    // Connect to device and start pipeline

    return 0;
}

// Open the device, start the pipeline and loop forever:
//  1. pull the synced preview frame, detections, depth frame and raw NN output,
//  2. draw boxes/labels/spatial coordinates on the frames,
//  3. serialize "person" detections (label 0) into a JSON array,
//  4. publish JSON + preview frame via writemem->write_data().
// Returns 0 when the user presses 'q'/'Q' in the preview window.
int OAK::detect(WriteMem *writemem) {

    // Connecting here (rather than in initial()) keeps the device open
    // only while detection runs.
    dai::Device device(pipeline);

    // Output queues will be used to get the rgb frames and nn data from the outputs defined above
    previewQueue = device.getOutputQueue("rgb", 4, false);
    detectionNNQueue = device.getOutputQueue("detections", 4, false);
    xoutBoundingBoxDepthMappingQueue = device.getOutputQueue("boundingBoxDepthMapping", 4, false);
    depthQueue = device.getOutputQueue("depth", 4, false);
    networkQueue = device.getOutputQueue("nnNetwork", 4, false);
    // NOTE(review): startTime is never read after this — confirm it can be removed.
    auto startTime = steady_clock::now();


    while (true) {
        // Fresh JSON array per frame; allocator lifetime is tied to doc.
        rapidjson::Document doc;
        doc.SetArray();
        rapidjson::Document::AllocatorType &allocator = doc.GetAllocator();

        // Blocking get() on each stream; with syncNN these correspond to
        // the same capture.
        auto imgFrame = previewQueue->get<dai::ImgFrame>();
        auto inDet = detectionNNQueue->get<dai::SpatialImgDetections>();
        auto depth = depthQueue->get<dai::ImgFrame>();
        auto inNN = networkQueue->get<dai::NNData>();

        // One-time debug dump of the network's output layer names.
        if (printOutputLayersOnce && inNN) {
            std::cout << "Output layer names: ";
            for (const auto &ten : inNN->getAllLayerNames()) {
                std::cout << ten << ", ";
            }
            std::cout << std::endl;
            printOutputLayersOnce = false;
        }

        cv::Mat frame = imgFrame->getCvFrame();
        cv::Mat depthFrame = depth->getFrame(); // depthFrame values are in millimeters

        // Colorize depth for display: normalize -> equalize -> heat map.
        cv::Mat depthFrameColor;
        cv::normalize(depthFrame, depthFrameColor, 255, 0, cv::NORM_INF, CV_8UC1);
        cv::equalizeHist(depthFrameColor, depthFrameColor);
        cv::applyColorMap(depthFrameColor, depthFrameColor, cv::COLORMAP_HOT);


        auto detections = inDet->detections;
        if (!detections.empty()) {
            // Draw each detection's depth-ROI on the colorized depth map.
            auto boundingBoxMapping = xoutBoundingBoxDepthMappingQueue->get<dai::SpatialLocationCalculatorConfig>();
            auto roiDatas = boundingBoxMapping->getConfigData();

            for (auto roiData : roiDatas) {
                auto roi = roiData.roi;
                // ROIs are normalized [0,1]; scale to depth-image pixels.
                roi = roi.denormalize(depthFrameColor.cols, depthFrameColor.rows);
                auto topLeft = roi.topLeft();
                auto bottomRight = roi.bottomRight();
                auto xmin = (int) topLeft.x;
                auto ymin = (int) topLeft.y;
                auto xmax = (int) bottomRight.x;
                auto ymax = (int) bottomRight.y;

                cv::rectangle(depthFrameColor, cv::Rect(cv::Point(xmin, ymin), cv::Point(xmax, ymax)), color,
                              cv::FONT_HERSHEY_SIMPLEX);
            }
        }

        for (const auto &detection : detections) {
            // Detection coords are normalized; scale to preview pixels.
            int x1 = detection.xmin * frame.cols;
            int y1 = detection.ymin * frame.rows;
            int x2 = detection.xmax * frame.cols;
            int y2 = detection.ymax * frame.rows;
            uint32_t labelIndex = detection.label;

            // Only label 0 ("person" in the COCO map) is serialized/drawn.
            if (labelIndex == 0) {
                rapidjson::Document item_doc;
                item_doc.SetObject();
                std::string labelStr = to_string(labelIndex);
                if (labelIndex < labelMap.size()) {
                    labelStr = labelMap[labelIndex];
                }

                // Store everything as doubles; "d" is the depth (z) in mm.
                item_doc.AddMember("x1", x1 * 1.0, allocator);
                item_doc.AddMember("y1", y1 * 1.0, allocator);
                item_doc.AddMember("x2", x2 * 1.0, allocator);
                item_doc.AddMember("y2", y2 * 1.0, allocator);
                item_doc.AddMember("c", detection.confidence * 1.0, allocator);
                item_doc.AddMember("d", (int) detection.spatialCoordinates.z * 1.0, allocator);
                doc.PushBack(item_doc, allocator);

                // Overlay label, confidence and X/Y/Z (mm) on the preview.
                cv::putText(frame, labelStr, cv::Point(x1 + 10, y1 + 20), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                std::stringstream confStr;
                confStr << std::fixed << std::setprecision(2) << detection.confidence * 100;
                cv::putText(frame, confStr.str(), cv::Point(x1 + 10, y1 + 35), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);

                std::stringstream depthX;
                depthX << "X: " << (int) detection.spatialCoordinates.x << " mm";
                cv::putText(frame, depthX.str(), cv::Point(x1 + 10, y1 + 50), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                std::stringstream depthY;
                depthY << "Y: " << (int) detection.spatialCoordinates.y << " mm";
                cv::putText(frame, depthY.str(), cv::Point(x1 + 10, y1 + 65), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                std::stringstream depthZ;
                depthZ << "Z: " << (int) detection.spatialCoordinates.z << " mm";
                cv::putText(frame, depthZ.str(), cv::Point(x1 + 10, y1 + 80), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);

                cv::rectangle(frame, cv::Rect(cv::Point(x1, y1), cv::Point(x2, y2)), color, cv::FONT_HERSHEY_SIMPLEX);
            }

        }

        // Serialize the array and hand JSON + frame to the shm writer.
        rapidjson::StringBuffer buffer;
        rapidjson::Writer<rapidjson::StringBuffer> write_json(buffer);
        doc.Accept(write_json);
        std::string buf_json_str = buffer.GetString();
        allocator.Clear();

        writemem->write_data(buf_json_str,frame);


        // cv::imshow("depth", depthFrameColor);
        cv::imshow("rgb", frame);

        int key = cv::waitKey(1);
        if (key == 'q' || key == 'Q') {
            return 0;
        }
    }
    return 0;
}

Reader.h

#ifndef DEPTHAI_READER_H
#define DEPTHAI_READER_H
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/shm.h>
#include <string.h>
#include <signal.h>
#include "iostream"
#include "common.h"
#include "opencv2/core.hpp"
#include "opencv2/highgui.hpp"
#define SHARE_MEMORY_BUFFER_LEN 1024

// Consumer side of the shared-memory exchange: attaches to the System V
// segment, polls for data flagged by the writer (iSignal), parses the
// JSON payload and displays the image. Owns segment removal (IPC_RMID)
// in clean_data().
class ReaderMem {
public:
    ReaderMem();
    ~ReaderMem();

public:
    // Attach to (or create) the shm segment for key_id. Returns 0 on
    // success, -1 on shmget/shmat failure.
    int init_paramter(int key_id);
    // One poll iteration: if iSignal != 0, consume and print the pending
    // data, then reset iSignal to 0. Returns 0.
    int read_data();
    // Detach and mark the segment for removal.
    void clean_data();
private:
    rapidjson::Document document;   // reused parse target across calls
    // NOTE(review): appears unused in the visible code — confirm removable.
    rapidjson::Value DetectObj;
    struct stuShareMemory *stu = NULL;  // typed view of the attached segment
    void *shm = NULL;                   // raw address returned by shmat()
    // NOTE(review): struct Msg is never defined/used here — confirm removable.
    struct Msg *msg;
    int shmid=-1;   // segment id; needed for IPC_RMID in clean_data()



};
#endif //DEPTHAI_READER_H

Reader.cpp

#include "Reader.h"

// Nothing to acquire up front; attachment happens in init_paramter().
ReaderMem::ReaderMem() = default;
ReaderMem::~ReaderMem() = default;

// Attach this process to the System V shared-memory segment identified by
// key_id, creating it if necessary. Returns 0 on success, -1 on failure.
int ReaderMem::init_paramter(int key_id){
    printf("ReaderMem %d\n",key_id);

    shmid = shmget((key_t)key_id, sizeof(struct stuShareMemory), 0666 | IPC_CREAT);
    if (shmid < 0) {
        printf("shmget err.\n");
        return -1;
    }

    shm = shmat(shmid, NULL, 0);
    if (shm == (void*)-1) {
        printf("shmat err.\n");
        return -1;
    }

    // View the raw segment as the fixed-layout exchange record.
    stu = (struct stuShareMemory*)shm;
    return 0;
}

// Poll the shared segment once. If the writer flagged fresh data
// (iSignal != 0): display the image, parse the JSON detection array,
// print each detection's numeric fields, then hand the buffer back by
// resetting iSignal to 0. Returns 0 always.
int ReaderMem::read_data() {

    if(stu->iSignal != 0)
    {
        // Copy exactly chLength bytes: chBuffer is not NUL-terminated and
        // may contain a stale tail from a previous, longer message.
        std::string newMem(stu->chBuffer, stu->chLength);
        printf("-----------reader------------ \n%s\n",newMem.c_str());

        // Rebuild the image from the raw pixel bytes written by WriteMem.
        // Assumes 3-channel 8-bit data, matching the writer's BGR preview.
        cv::Mat cvoutImg = cv::Mat(stu->pixel_rows,stu->pixel_cols,CV_8UC3,cv::Scalar(255, 255, 255));
        memcpy((char*)cvoutImg.data, stu->pixel,stu->pixel_rows*stu->pixel_cols*stu->pixel_channels);
        cv::imshow("read",cvoutImg);
        cv::waitKey(1);

        // Top-level value must be an array of detection objects.
        if (!document.Parse(newMem.c_str()).HasParseError() && document.IsArray()) {
            for (rapidjson::SizeType i = 0; i < document.Size(); i++)
            {
                rapidjson::Value& json_obj = document[i];
                if (!json_obj.IsObject())
                    continue;

                // Print every field the writer emits. Fix: the original
                // code never read "y2"; it also used IsFloat()/GetFloat(),
                // which can reject values stored as full-precision doubles
                // — IsNumber()/GetDouble() accepts them all.
                static const char* kFields[] = {"x1", "y1", "x2", "y2", "c", "d"};
                for (const char* name : kFields)
                {
                    if (json_obj.HasMember(name) && json_obj[name].IsNumber())
                    {
                        printf("%.2f\n", json_obj[name].GetDouble());
                    }
                }
            }
        }

        // Release the buffer back to the writer.
        stu->iSignal = 0;
    }

    return 0;
}

void ReaderMem::clean_data() {
shmdt(shm);
shmctl(shmid, IPC_RMID, 0);

}

Write.h

#ifndef DEPTHAI_WRITE_H
#define DEPTHAI_WRITE_H
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/shm.h>
#include <string.h>
#include <signal.h>
#include "iostream"
using namespace std;
#define SHARE_MEMORY_BUFFER_LEN 1024
#include "common.h"
#include "opencv2/core.hpp"
#include "opencv2/highgui.hpp"


// Producer side of the shared-memory exchange: attaches to the System V
// segment and publishes JSON detection strings plus raw image bytes,
// using stuShareMemory::iSignal as the ready flag. Segment removal is
// handled by the reader side.
class WriteMem {
public:
    WriteMem();
    ~WriteMem();

public:
    // Attach to (or create) the shm segment for key_id and mark it empty.
    // Returns 0 on success, -1 on shmget/shmat failure.
    int init_paramter(int key_id);
    // Publish one JSON string + frame; skipped (frame dropped) while the
    // reader has not consumed the previous one. Returns 0.
    int write_data(std::string content,cv::Mat frame);
    // Detach from the segment.
    void clean_data();
private:
    struct stuShareMemory *stu = NULL;  // typed view of the attached segment
    void *shm = NULL;                   // raw address returned by shmat()
    // NOTE(review): struct Msg is never defined/used here — confirm removable.
    struct Msg *msg;
};


#endif //DEPTHAI_WRITE_H

Write.cpp

#include "Write.h"

// Nothing to acquire up front; attachment happens in init_paramter().
WriteMem::WriteMem() = default;

WriteMem::~WriteMem() = default;

int WriteMem::init_paramter(int key_id) {
printf("WriteMem %d\n",key_id);
int shmid = shmget((key_t) key_id, sizeof(struct stuShareMemory), 0666 | IPC_CREAT);
if (shmid == -1) {
printf("shmget err.\n");
return -1;
}

shm = shmat(shmid, (void *) 0, 0);
if (shm == (void *) -1) {
printf("shmat err.\n");
return -1;
}

stu = (struct stuShareMemory *) shm;

stu->iSignal = 0;

return 0;
}

int WriteMem::write_data(std::string content,cv::Mat frame) {

printf("-----------write------------\n %s \n",content.c_str());

if (stu->iSignal != 1) {
stu->chLength = content.size();
memcpy(stu->chBuffer, content.c_str(), stu->chLength);

char *image=(char*)frame.data;
stu->pixel_rows = frame.rows;
stu->pixel_cols = frame.cols;
stu->pixel_channels = frame.channels();
memcpy(stu->pixel,image,stu->pixel_rows*stu->pixel_cols*stu->pixel_channels);


stu->iSignal = 1;
}
return 0;
}

void WriteMem::clean_data() {
shmdt(shm);

std::cout << "end progress." << endl;
}

测试数据

-----------write------------
[{"x1":30.0,"y1":266.0,"x2":125.0,"y2":414.0,"c":0.7772049903869629,"d":1530.0},{"x1":154.0,"y1":254.0,"x2":249.0,"y2":332.0,"c":0.6232306957244873,"d":3709.0},{"x1":0.0,"y1":309.0,"x2":38.0,"y2":415.0,"c":0.5261836051940918,"d":3442.0}]
-----------reader------------
[{"x1":30.0,"y1":266.0,"x2":125.0,"y2":414.0,"c":0.7772049903869629,"d":1530.0},{"x1":154.0,"y1":254.0,"x2":249.0,"y2":332.0,"c":0.6232306957244873,"d":3709.0},{"x1":0.0,"y1":309.0,"x2":38.0,"y2":415.0,"c":0.5261836051940918,"d":3442.0}]
30.00
266.00
125.00
0.78
1530.00
154.00
254.00
249.00
0.62
3709.00
0.00
309.00
38.00
0.53
3442.00
-----------write------------
[{"x1":29.0,"y1":267.0,"x2":121.0,"y2":415.0,"c":0.8682379722595215,"d":1765.0},{"x1":156.0,"y1":254.0,"x2":252.0,"y2":333.0,"c":0.605661153793335,"d":3693.0}]
-----------reader------------
[{"x1":29.0,"y1":267.0,"x2":121.0,"y2":415.0,"c":0.8682379722595215,"d":1765.0},{"x1":156.0,"y1":254.0,"x2":252.0,"y2":333.0,"c":0.605661153793335,"d":3693.0}]
29.00
267.00
121.00
0.87
1765.00
156.00
254.00
252.00
0.61
3693.00

参考:

​OpenCV CEO教你用OAK(四):创建复杂的管道 - 知乎​

标签:std,共享内存,48,int,auto,OAK,json,include,cv
From: https://blog.51cto.com/u_12504263/5873022

相关文章

  • P2448 无尽的生命
    可以把连续不改变的一条线段看做一个点,重新赋值,求逆序对即可。注意开\(longlong\)!#include<iostream>#include<algorithm>#include<cmath>#include<cstring>#incl......
  • 48-50. 数据合并,一对一,多对一,多对多
         #数据合并一对一#相同的列进行合并importpandasaspddf1=pd.read_excel('student1.xlsx')df2=pd.read_excel('student2.xlsx')#dfl#测试数据读......
  • CF48G Galaxy Union
    CF48GGalaxyUnion给定一棵基环树,求每个点到其他点的最小距离之和。\(n\leq2\times10^5\)。对于在环上的点和在树上的点分开处理。设在环上的点集为\(H\),先求出......
  • 绿色2048
    绿色2048 最近2048小游戏非常火。 做了一个,可以通过以下链接下载或通过附件下载,安装到android手机即可。​​http://www.eoeshop.com:50057/apk/shuzi.apk​​ 说明;绿色......
  • 一次启动失败引发的思考:-server -XX:PermSize=2048M -XX:MaxPermSize=4096m
    Tomcat启动参数启动项目时,由于项目比较大,无法正常启动,报异常:java.lang.OutOfMemoryError:PermGenspace,在idea中设置VMoptions为:-server-XX:PermSize=2048M-XX:MaxPer......
  • 代码48985656
    4564jashdahdoias的款式哦对哈送i和x下面是演示代码//这里是需要高亮的代码importReact,{Component}from'react'componentDidCat......
  • 华普物联HP-ERS-T200,振动传感器监测方案,工业级双串口通信服务器,RS485/232转以太网双
    随着科技的进步,现在工业化设备正逐步走向复杂化、高速化、自动化。为了掌握设备运行状态、避免发生事故,对生产中的关键机组实行在线监测和故障诊断,也越来越引起人们的重视......
  • 什么是485中继器,RS-485中继器产品介绍
    485中继器是光隔离的RS-485/422的数据中继通信产品,可以中继延长RS-485/422总线网络的通信距离,增强RS-485/422总线网络设备的数目。可以将485总线进行光电隔离,防止共模电压干......
  • 485光隔离中继器产品特点及应用领域介绍
    光电隔离RS485/RS422中继器,可作为485信号中继放大或485信号转422信号使用,一款专为解决RS-485/422信号长距离传输时,信号弱、信号易干扰问题的产品。那么,485光隔离中继器产品......
  • 485通讯转换器产品功能特点介绍
    485转换器主要的作用是将单端的RS-232信号转换为平衡差分的RS-485或RS-422信号。RS-485、RS-422自动识别功能,使用更加简单。那么,485转换器产品有哪些特点呢?接下来我们就跟随......