Upload the merged yolo3 and yolo8 code

main
Jinhuan 5 days ago
parent a650161d80
commit 9f842cfef0

@ -63,6 +63,9 @@
"thread": "cpp",
"cinttypes": "cpp",
"typeindex": "cpp",
"typeinfo": "cpp"
"typeinfo": "cpp",
"cassert": "cpp",
"bitset": "cpp",
"regex": "cpp"
}
}

@ -0,0 +1,5 @@
{
"files.associations": {
"memory": "cpp"
}
}

@ -0,0 +1,36 @@
//// YOLOManager.cpp
//#include "YOLOManager.h"
//
//YOLOManager::~YOLOManager() {
// // Resource cleanup code can be added here
//}
#include "YOLOManager.h"
#include <alg_jd_yolo3.h>
#include <alg_jd_yolo8.h>
bool YOLOManager::InitializeDetector(SysConf& sys_conf, uint8_t Num) {
bool ret = true;
params[Num].rectConfidenceThreshold = sys_conf.ConfThreshold;
params[Num].modelPath = sys_conf.model_path.toStdString();
params[Num].imgSize = { IMAGE_WIDTH, IMAGE_HEIGHT };
#ifdef USE_CUDA
params[Num].cudaEnable = true;
#else
params[Num].cudaEnable = false;
#endif
//params[Num].modelType = YOLO_DETECT_V3;
params[Num].modelType = YOLO_DETECT_V8;
if (params[Num].modelType == YOLO_DETECT_V8)
detector[Num] = std::unique_ptr<YOLO>(new YOLO_V8());
else if (params[Num].modelType == YOLO_DETECT_V3)
detector[Num] = std::unique_ptr<YOLO>(new YOLO_V3());
if (detector[Num])
{
ret &= detector[Num]->InitParam(params[Num]);
ret &= detector[Num]->CreateSession(params[Num].modelPath);
}
return ret;
}
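A minimal usage sketch of the manager (a hypothetical caller, not part of this commit; assumes a SysConf named sys_conf has been populated elsewhere and camera index 0 is in use):
// Illustrative caller only.
SysConf sys_conf; // assumed to be filled in elsewhere
if (YOLOManager::GetInstance().InitializeDetector(sys_conf, 0)) {
    cv::Mat in = cv::imread("sample.bmp"), out;
    std::vector<std::pair<int, cv::Rect>> results;
    YOLOManager::GetInstance().GetDetector(0)->RunSession(in, out, results);
}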

@ -0,0 +1,92 @@
#pragma once
#include "common.h"
#include <basecamera.h>
#include <memory>
#include <opencv2/opencv.hpp>
enum MODEL_TYPE
{
YOLO_DETECT_V3 = 0,
//FLOAT32 MODEL
YOLO_DETECT_V8 = 1,
YOLO_POSE = 2,
YOLO_CLS = 3,
//FLOAT16 MODEL
YOLO_DETECT_V8_HALF = 4,
YOLO_POSE_V8_HALF = 5,
YOLO_CLS_HALF = 6
};
typedef struct _DL_INIT_PARAM
{
std::string modelPath;
MODEL_TYPE modelType = YOLO_DETECT_V8;
std::vector<int> imgSize = { IMAGE_WIDTH, IMAGE_HEIGHT };
float rectConfidenceThreshold = 0.6;
float iouThreshold = 0.5;
int keyPointsNum = 2; // Note: kpt number for pose
bool cudaEnable = false;
int logSeverityLevel = 3;
int intraOpNumThreads = 1;
} DL_INIT_PARAM;
typedef struct _DL_RESULT
{
int classId;
float confidence;
cv::Rect box;
std::vector<cv::Point2f> keyPoints;
} DL_RESULT;
class YOLO
{
public:
virtual bool InitParam(DL_INIT_PARAM& iParams) = 0;
virtual bool CreateSession(std::string model_path, std::string model_name) = 0;
virtual bool CreateSession(std::string model_path) = 0;
virtual char* RunSession(cv::Mat& in, cv::Mat& out, std::vector<std::pair<int, cv::Rect> >& results) = 0;
virtual char* RunSession(std::vector<cv::Mat>& vec_in, std::vector<cv::Mat>& vec_out, std::vector<std::vector<std::pair<int, cv::Rect> > >& vec_results) = 0;
virtual char* WarmUpSession() = 0;
virtual char* WarmUpSession(int shoot) = 0;
};
class YOLOManager {
public:
// Returns the singleton instance
static YOLOManager& GetInstance() {
static YOLOManager instance;
return instance;
}
// Returns the YOLO detector instance for camera Num
YOLO* GetDetector(uint8_t Num) const {
return detector[Num].get();
}
bool InitializeDetector(SysConf& sys_conf, uint8_t Num);
private:
// Private constructor prevents external instantiation
YOLOManager() = default;
// Copy construction and assignment are disabled
YOLOManager(const YOLOManager&) = delete;
YOLOManager& operator=(const YOLOManager&) = delete;
~YOLOManager() = default;
// Per-camera parameters and detector instances
#ifdef __DEBUG
DL_INIT_PARAM params[NumberOfSupportedCameras+1];
std::unique_ptr<YOLO> detector[NumberOfSupportedCameras+1];
#else
DL_INIT_PARAM params[NumberOfSupportedCameras];
std::unique_ptr<YOLO> detector[NumberOfSupportedCameras];
#endif
};

@ -1,37 +1,38 @@
#include "alg_jd.h"
#include "alg_jd_yolo3.h"
#include <direct.h> // required library header
extern SysConf g_sys_conf;
bool YOLO_V3::InitParam(DL_INIT_PARAM& iParams)
{
// Initialize the parameters
static float confThreshold = g_sys_conf.ConfThreshold * 0.01; // Confidence threshold
static float nmsThreshold = 0.4; // Non-maximum suppression threshold
static int inpWidth = 416; // Width of network's input image
static int inpHeight = 416; // Height of network's input image
static std::vector<std::string> classes;
confThreshold = iParams.rectConfidenceThreshold * 0.01; // Confidence threshold
nmsThreshold = 0.4; // Non-maximum suppression threshold
inpWidth = 416; // Width of network's input image
inpHeight = 416; // Height of network's input image
return true;
}
bool AlgJd::init(QString model_path, QString model_name)
bool YOLO_V3::CreateSession(std::string model_path, std::string model_name)
{
// Load names of classes
std::string classesFile;
cv::String modelConfiguration;
cv::String modelWeights;
QString image_path;
std::string modelConfiguration;
std::string modelWeights;
std::string image_path;
if (model_path.length() > 0 && model_name.length() > 0) {
// Assemble the model file paths
modelWeights = model_path.toStdString() + "/" + model_name.toStdString();
modelConfiguration = model_path.toStdString() + "/jd.cfg";
classesFile = model_path.toStdString() + "/jd.names";
image_path = model_path + "/" + "alg_jd.bmp";
modelWeights = model_path + "/" + model_name;
modelConfiguration = model_path + "/jd.cfg";
classesFile = model_path + "/jd.names";
image_path = model_path + "/" + "alg_jd"+ std::string(SUFFIX);
}
else {
modelWeights = "D:/model/jd.weights";
classesFile = "D:/model/jd.names";
// Give the configuration and weight files for the model
modelConfiguration = "D:/model/jd.cfg";
image_path = "D:/model/alg_jd.bmp";
image_path = "D:/model/alg_jd"+ std::string(SUFFIX);
}
std::ifstream classNamesFile(classesFile.c_str());
@ -48,57 +49,64 @@ bool AlgJd::init(QString model_path, QString model_name)
// Load the network
net = cv::dnn::readNetFromDarknet(modelConfiguration, modelWeights);
#ifdef USE_CUDA
net.setPreferableBackend(cv::dnn::DNN_BACKEND_CUDA);
net.setPreferableTarget(cv::dnn::DNN_TARGET_CUDA);
#endif
//cv::Mat image = cv::imread("alg_jd.bmp");
cv::Mat image = cv::imread(image_path.toStdString());
//cv::Mat image = cv::imread("alg_jd"+SUFFIX);
cv::Mat image = cv::imread(image_path);
/*if (g_sys_conf.model_jpg_path.length() > 0) {
image = cv::imread(g_sys_conf.model_jpg_path.toStdString());
}
else {
image = cv::imread("D:/Release/alg_jd.bmp");
image = cv::imread("D:/Release/alg_jd"+SUFFIX);
}*/
//Run one test image to verify the model is correct and finish loading data onto the GPU
if (!image.data) return false; //check that the test image was read correctly
std::vector<std::pair<int, cv::Rect> > results;
detect(image, image, results);
RunSession(image, image, results);
if (results.size() > 0)
return true; //targets detected, so initialization succeeded
else
return false; //otherwise initialization failed
}
bool AlgJd::test_detect()
bool YOLO_V3::CreateSession(std::string model_path)
{
return CreateSession(model_path, "");
}
char* YOLO_V3::WarmUpSession()
{
cv::Mat m1;
m1 = cv::Mat(544, 728, CV_8UC3, cv::Scalar(0, 0, 0));
m1 = cv::Mat(IMAGE_HEIGHT, IMAGE_WIDTH, CV_8UC3, cv::Scalar(0, 0, 0));
std::vector<std::pair<int, cv::Rect> > results;
double t = (double)cv::getTickCount();
detect(m1, m1, results);
RunSession(m1, m1, results);
t = ((double)cv::getTickCount() - t) / cv::getTickFrequency();
DEBUG(" test_detect time process:%f\n", t);
return true;
DEBUG(" WarmUpSession time process:%f\n", t);
return (char*)"WarmUpSession pass";
}
bool AlgJd::test_detect_batcht(int shoot)
char* YOLO_V3::WarmUpSession(int shoot)
{
std::vector<cv::Mat> vec_in;
std::vector<cv::Mat> vec_out;
std::vector<std::vector<std::pair<int, cv::Rect>>> vec_results;
cv::Mat m1, m2, m3;
cv::Mat m4;
m1 = cv::Mat(544, 728, CV_8UC3, cv::Scalar(0, 0, 0));
m2 = cv::Mat(544, 728, CV_8UC3, cv::Scalar(0, 0, 0));
m3 = cv::Mat(544, 728, CV_8UC3, cv::Scalar(0, 0, 0));
m4 = cv::Mat(544, 728, CV_8UC3, cv::Scalar(0, 0, 0));
m1 = cv::Mat(IMAGE_HEIGHT, IMAGE_WIDTH, CV_8UC3, cv::Scalar(0, 0, 0));
m2 = cv::Mat(IMAGE_HEIGHT, IMAGE_WIDTH, CV_8UC3, cv::Scalar(0, 0, 0));
m3 = cv::Mat(IMAGE_HEIGHT, IMAGE_WIDTH, CV_8UC3, cv::Scalar(0, 0, 0));
m4 = cv::Mat(IMAGE_HEIGHT, IMAGE_WIDTH, CV_8UC3, cv::Scalar(0, 0, 0));
double t = (double)cv::getTickCount();
switch (shoot) {
case 1: {
std::vector<std::pair<int, cv::Rect> > results;
detect(m1, m1, results);
RunSession(m1, m1, results);
break;
}
case 3:vec_in.push_back(m3);
@ -106,15 +114,15 @@ bool AlgJd::test_detect_batcht(int shoot)
case 4:vec_in.push_back(m4);
default: {
vec_in.push_back(m1);
detect_batch(vec_in, vec_out, vec_results);
RunSession(vec_in, vec_out, vec_results);
}
}
t = ((double)cv::getTickCount() - t) / cv::getTickFrequency();
DEBUG(" test_detect_batcht time process:%f\n", t);
return true;
DEBUG(" WarmUpSession time process:%f\n", t);
return (char*)"WarmUpSession pass";
}
int AlgJd::detect(cv::Mat& _frame, cv::Mat& out, std::vector<std::pair<int, cv::Rect> >& results)
char* YOLO_V3::RunSession(cv::Mat& _frame, cv::Mat& out, std::vector<std::pair<int, cv::Rect> >& results)
{
cv::Mat frame = _frame.clone();
cv::Mat image_clone = frame.clone();
@ -141,10 +149,11 @@ int AlgJd::detect(cv::Mat& _frame, cv::Mat& out, std::vector<std::pair<int, cv::
frame.convertTo(detectedFrame, CV_8U);
//show detectedFrame
out = detectedFrame.clone();
return results.size();
//return results.size();
return (char*)"pass";
}
void AlgJd::analyse(cv::Mat vec_in, std::vector<std::pair<int, cv::Rect> >& vec_results)
void YOLO_V3::analyse(cv::Mat vec_in, std::vector<std::pair<int, cv::Rect> >& vec_results)
{
for (int i = 0; i < vec_results.size(); i++)
{
@ -159,7 +168,7 @@ void AlgJd::analyse(cv::Mat vec_in, std::vector<std::pair<int, cv::Rect> >& vec_
}
// Get the names of the output layers
std::vector<cv::String> AlgJd::getOutputsNames(const cv::dnn::Net& net)
std::vector<cv::String> YOLO_V3::getOutputsNames(const cv::dnn::Net& net)
{
std::vector<cv::String> names;
if (names.empty())
@ -178,7 +187,7 @@ std::vector<cv::String> AlgJd::getOutputsNames(const cv::dnn::Net& net)
return names;
}
void AlgJd::post_process(cv::Mat& frame, std::vector<cv::Mat>& outs, std::vector<std::pair<int, cv::Rect> >& results)
void YOLO_V3::post_process(cv::Mat& frame, std::vector<cv::Mat>& outs, std::vector<std::pair<int, cv::Rect> >& results)
{
std::vector<std::vector<int>> classIds(classes.size());
std::vector<std::vector<float>> confidences(classes.size());
@ -233,7 +242,7 @@ void AlgJd::post_process(cv::Mat& frame, std::vector<cv::Mat>& outs, std::vector
}
// Draw the predicted bounding box
void AlgJd::drawPred(int classId, float conf, int left, int top, int right, int bottom, cv::Mat& frame)
void YOLO_V3::drawPred(int classId, float conf, int left, int top, int right, int bottom, cv::Mat& frame)
{
cv::Scalar color;
if (classId == 0)
@ -264,7 +273,7 @@ void AlgJd::drawPred(int classId, float conf, int left, int top, int right, int
cv::putText(frame, label, cv::Point(left, top - 5), cv::FONT_HERSHEY_SIMPLEX, 0.8, color, 1);
}
void AlgJd::detect_batch(std::vector<cv::Mat>& vec_in, std::vector<cv::Mat>& vec_out, std::vector<std::vector<std::pair<int, cv::Rect> > >& vec_results)
char* YOLO_V3::RunSession(std::vector<cv::Mat>& vec_in, std::vector<cv::Mat>& vec_out, std::vector<std::vector<std::pair<int, cv::Rect> > >& vec_results)
{
cv::Mat blobs;
std::vector<cv::Mat> image;
@ -295,9 +304,10 @@ void AlgJd::detect_batch(std::vector<cv::Mat>& vec_in, std::vector<cv::Mat>& vec
out = detectedFrame.clone();
vec_out.push_back(out);
}
return (char*)"pass";
}
void AlgJd::post_process_batch(std::vector<cv::Mat>& vec_frame, std::vector<cv::Mat>& outs, std::vector<std::vector<std::pair<int, cv::Rect> > >& vec_results)
void YOLO_V3::post_process_batch(std::vector<cv::Mat>& vec_frame, std::vector<cv::Mat>& outs, std::vector<std::vector<std::pair<int, cv::Rect> > >& vec_results)
{
int batch = vec_frame.size();
double confidence;///
@ -356,7 +366,7 @@ void AlgJd::post_process_batch(std::vector<cv::Mat>& vec_frame, std::vector<cv::
{
int idx = indices[j];
cv::Rect box = boxes[i][idx];
if (confidences[i][idx] > g_sys_conf.ConfThreshold * 0.01)///confidence below the threshold is handled as NG
if (confidences[i][idx] > confThreshold)///confidence below the threshold is handled as NG
{
if (box.width > 15)
{//boxes wider than 15 px are shown as detections; narrower ones are taken as missing glue dots and handled as NG

@ -1,5 +1,4 @@
#ifndef _CIGARETTE_JD
#define _CIGARETTE_JD
#pragma once
#include <opencv2/opencv.hpp>
#include <opencv2/dnn.hpp>
@ -8,18 +7,25 @@
#include <opencv2/highgui.hpp>
#include <iostream>
#include <fstream>
#include "common.h"
class AlgJd
#include "YOLOManager.h"
class YOLO_V3:public YOLO
{
public:
bool init(QString model_path, QString model_name);
bool test_detect();
bool test_detect_batcht(int shoot);
int detect(cv::Mat& in, cv::Mat& out, std::vector<std::pair<int, cv::Rect> >& results);
bool InitParam(DL_INIT_PARAM& iParams)override;
bool CreateSession(std::string model_path, std::string model_name)override;
bool CreateSession(std::string model_path)override;
char* RunSession(cv::Mat& in, cv::Mat& out, std::vector<std::pair<int, cv::Rect> >& results)override;
char* RunSession(std::vector<cv::Mat>& vec_in, std::vector<cv::Mat>& vec_out, std::vector<std::vector<std::pair<int, cv::Rect> > >& vec_results)override;
char* WarmUpSession()override;
char* WarmUpSession(int shoot)override;
// Remove the bounding boxes with low confidence using non-maxima suppression
void post_process(cv::Mat& frame, std::vector<cv::Mat>& outs, std::vector<std::pair<int, cv::Rect> >& results);
void CircleImagePro(cv::Mat src, cv::Mat dst, std::vector<float> radius);
void detect_batch(std::vector<cv::Mat>& vec_in, std::vector<cv::Mat>& vec_out, std::vector<std::vector<std::pair<int, cv::Rect> > >& vec_results);
// Remove the bounding boxes with low confidence using non-maxima suppression
void post_process_batch(std::vector<cv::Mat>& vec_frame, std::vector<cv::Mat>& outs, std::vector<std::vector<std::pair<int, cv::Rect> > >& vec_results);
void analyse(cv::Mat vec_in, std::vector<std::pair<int, cv::Rect> >& vec_results);
@ -27,9 +33,13 @@ public:
std::vector<cv::String> getOutputsNames(const cv::dnn::Net& net);
// Draw the predicted bounding box
void drawPred(int classId, float conf, int left, int top, int right, int bottom, cv::Mat& frame);
private:
cv::dnn::Net net;
std::vector<std::string> classes;
};
#endif //end of _CIGARETTE_JD
float confThreshold; // Confidence threshold
float nmsThreshold; // Non-maximum suppression threshold
int inpWidth; // Width of network's input image
int inpHeight; // Height of network's input image
};

@ -0,0 +1,442 @@
#define _CRT_SECURE_NO_WARNINGS 1
#include "alg_jd_yolo8.h"
#include <regex>
#include <memory>
#define benchmark
#define min(a,b) (((a) < (b)) ? (a) : (b))
YOLO_V8::YOLO_V8() {
session = nullptr; // avoid deleting an uninitialized pointer if CreateSession never ran
}
YOLO_V8::~YOLO_V8() {
delete session;
for (char* name : inputNodeNames) {
delete[] name;
}
for (char* name : outputNodeNames) {
delete[] name;
}
}
#ifdef USE_CUDA
namespace Ort
{
template<>
struct TypeToTensorType<half> { static constexpr ONNXTensorElementDataType type = ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT16; };
}
#endif
template<typename T>
char* BlobFromImage(cv::Mat& iImg, T* iBlob) {
int channels = iImg.channels();
int imgHeight = iImg.rows;
int imgWidth = iImg.cols;
for (int c = 0; c < channels; c++) {
for (int h = 0; h < imgHeight; h++) {
for (int w = 0; w < imgWidth; w++) {
iBlob[c * imgWidth * imgHeight + h * imgWidth + w] = T(
(iImg.at<cv::Vec3b>(h, w)[c]) / 255.0f);
}
}
}
return RET_OK;
}
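// Note: BlobFromImage converts OpenCV's interleaved HWC layout into the planar
// CHW layout that ONNX Runtime expects, scaling each byte into [0,1]. For a pixel
// at (h, w) in channel c, the destination index is c*H*W + h*W + w, so for a
// 640x640 image the second channel plane starts at offset 1*640*640 = 409600.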
char* YOLO_V8::PreProcess(cv::Mat& iImg, std::vector<int> iImgSize, cv::Mat& oImg)
{
if (iImg.channels() == 3)
{
oImg = iImg.clone();
cv::cvtColor(oImg, oImg, cv::COLOR_BGR2RGB);
}
else
{
cv::cvtColor(iImg, oImg, cv::COLOR_GRAY2RGB);
}
switch (modelType)
{
case YOLO_DETECT_V8:
case YOLO_POSE:
case YOLO_DETECT_V8_HALF:
case YOLO_POSE_V8_HALF://LetterBox
{
if (iImg.cols >= iImg.rows)
{
resizeScales = iImg.cols / (float)iImgSize.at(0);
cv::resize(oImg, oImg, cv::Size(iImgSize.at(0), int(iImg.rows / resizeScales)));
}
else
{
resizeScales = iImg.rows / (float)iImgSize.at(0);
cv::resize(oImg, oImg, cv::Size(int(iImg.cols / resizeScales), iImgSize.at(1)));
}
cv::Mat tempImg = cv::Mat::zeros(iImgSize.at(0), iImgSize.at(1), CV_8UC3);
oImg.copyTo(tempImg(cv::Rect(0, 0, oImg.cols, oImg.rows)));
oImg = tempImg;
break;
}
case YOLO_CLS://CenterCrop
{
int h = iImg.rows;
int w = iImg.cols;
int m = min(h, w);
int top = (h - m) / 2;
int left = (w - m) / 2;
cv::resize(oImg(cv::Rect(left, top, m, m)), oImg, cv::Size(iImgSize.at(0), iImgSize.at(1)));
break;
}
}
return RET_OK;
}
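// Note: for the detection/pose branches, PreProcess letterboxes the image by
// scaling its longer side to the target size and padding the bottom/right with
// black, so resizeScales holds the original/target ratio. TensorProcess later
// multiplies the network's box coordinates by resizeScales to map them back
// onto the source image.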
bool YOLO_V8::InitParam(DL_INIT_PARAM& iParams)
{
// Initialize the parameters
rectConfidenceThreshold = iParams.rectConfidenceThreshold;
iouThreshold = iParams.iouThreshold;
imgSize = {640,640};
modelType = iParams.modelType;
cudaEnable = iParams.cudaEnable;
intraOpNumThreads = iParams.intraOpNumThreads;
logSeverityLevel = iParams.logSeverityLevel;
return true;
}
bool YOLO_V8::CreateSession(std::string model_path, std::string model_name) {
char* Ret = RET_OK;
// std::regex pattern("[\u4e00-\u9fa5]");
// bool result = std::regex_search(model_path, pattern);
// if (result) {
// return false;
// }
model_path += "/jd.onnx";
try {
env = Ort::Env(ORT_LOGGING_LEVEL_WARNING, "Yolo");
Ort::SessionOptions sessionOption;
if (cudaEnable) {
OrtCUDAProviderOptions cudaOption;
cudaOption.device_id = 0;
sessionOption.AppendExecutionProvider_CUDA(cudaOption);
}
sessionOption.SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_ALL);
sessionOption.SetIntraOpNumThreads(intraOpNumThreads);
sessionOption.SetLogSeverityLevel(logSeverityLevel);
#ifdef _WIN32
int ModelPathSize = MultiByteToWideChar(CP_UTF8, 0, model_path.c_str(),
static_cast<int>(model_path.length()), nullptr, 0);
std::unique_ptr<wchar_t[]> wide_cstr(new wchar_t[ModelPathSize + 1]);
MultiByteToWideChar(CP_UTF8, 0, model_path.c_str(),
static_cast<int>(model_path.length()), wide_cstr.get(), ModelPathSize);
wide_cstr[ModelPathSize] = L'\0';
const wchar_t* modelPath = wide_cstr.get();
#else
const char* modelPath = model_path.c_str();
#endif
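// On Windows, Ort::Session takes a wide-character model path, so the UTF-8
// std::string is widened with MultiByteToWideChar before the session is built;
// on other platforms the narrow string is passed through unchanged.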
session = new Ort::Session(env, modelPath, sessionOption);
Ort::AllocatorWithDefaultOptions allocator;
// Query the input node names
size_t inputNodesNum = session->GetInputCount();
for (size_t i = 0; i < inputNodesNum; i++) {
Ort::AllocatedStringPtr input_node_name = session->GetInputNameAllocated(i, allocator);
// Allocate a buffer large enough for the name
size_t name_len = strlen(input_node_name.get()) + 1;
char* temp_buf = new char[name_len];
strcpy(temp_buf, input_node_name.get());
inputNodeNames.push_back(temp_buf);
}
// Query the output node names
size_t OutputNodesNum = session->GetOutputCount();
for (size_t i = 0; i < OutputNodesNum; i++) {
Ort::AllocatedStringPtr output_node_name = session->GetOutputNameAllocated(i, allocator);
// Allocate a buffer large enough for the name
size_t name_len = strlen(output_node_name.get()) + 1;
char* temp_buf = new char[name_len];
strcpy(temp_buf, output_node_name.get());
outputNodeNames.push_back(temp_buf);
}
options = Ort::RunOptions{ nullptr };
WarmUpSession();
return true;
}
catch (const std::exception& e) {
std::string result = "[YOLO_V8]:" + std::string(e.what());
char* merged = new char[result.length() + 1];
std::strcpy(merged, result.c_str());
std::cout << merged << std::endl;
delete[] merged;
return false;
}
}
bool YOLO_V8::CreateSession(std::string model_path)
{
return CreateSession(model_path, "");
}
//char* YOLO_V8::RunSession(cv::Mat& iImg, std::vector<DL_RESULT>& oResult) {
char* YOLO_V8::RunSession(cv::Mat& in, cv::Mat& out, std::vector<std::pair<int, cv::Rect> >& results){
std::vector<DL_RESULT> oResult;
out = in.clone();
#ifdef benchmark
clock_t starttime_1 = clock();
#endif
char* Ret = RET_OK;
cv::Mat processedImg;
PreProcess(in, imgSize, processedImg);
if (modelType < 4) {
std::unique_ptr<float[]> blob(new float[processedImg.total() * 3]);
BlobFromImage(processedImg, blob.get()); // pass the raw pointer via .get()
std::vector<int64_t> inputNodeDims = { 1, 3, imgSize.at(0), imgSize.at(1) };
TensorProcess(starttime_1, in, blob.get(), inputNodeDims, oResult);
}
else {
#ifdef USE_CUDA
std::unique_ptr<half[]> blob(new half[processedImg.total() * 3]);
BlobFromImage(processedImg, blob.get()); // pass the raw pointer via .get()
std::vector<int64_t> inputNodeDims = { 1, 3, imgSize.at(0), imgSize.at(1) };
TensorProcess(starttime_1, in, blob.get(), inputNodeDims, oResult);
#endif
}
// Draw the detection results
for (auto& re : oResult) {
cv::RNG rng(cv::getTickCount());
//cv::Scalar color(rng.uniform(0, 256), rng.uniform(0, 256), rng.uniform(0, 256));
cv::Scalar color(0, 255, 0);
cv::rectangle(out, re.box, color, 3);
float confidence = floor(100 * re.confidence) / 100;
std::cout << std::fixed << std::setprecision(2);
std::string label = std::string("jd") + std::string(" ") +
std::to_string(confidence).substr(0, std::to_string(confidence).size() - 4);
cv::rectangle(
out,
cv::Point(re.box.x, re.box.y - 25),
cv::Point(re.box.x + label.length() * 15, re.box.y),
color,
cv::FILLED
);
cv::putText(
out,
label,
cv::Point(re.box.x, re.box.y - 5),
cv::FONT_HERSHEY_SIMPLEX,
0.75,
cv::Scalar(0, 0, 0),
2
);
}
if (!oResult.empty())
{
for (uint16_t index = 0; index < oResult.size(); index++)
{
results.push_back(std::make_pair(oResult[index].classId, oResult[index].box));
}
}
return Ret;
}
char* YOLO_V8::RunSession(std::vector<cv::Mat>& vec_in, std::vector<cv::Mat>& vec_out, std::vector<std::vector<std::pair<int, cv::Rect> > >& vec_results){
std::vector<DL_RESULT> oResult;
return (char*)"RunSession Undefined";
}
template<typename T>
char* YOLO_V8::TensorProcess(clock_t& starttime_1, cv::Mat& iImg, T* blob,
std::vector<int64_t>& inputNodeDims,
std::vector<DL_RESULT>& oResult) {
// Create the input tensor
Ort::Value inputTensor = Ort::Value::CreateTensor<T>(
Ort::MemoryInfo::CreateCpu(OrtDeviceAllocator, OrtMemTypeCPU),
blob,
3 * imgSize.at(0) * imgSize.at(1),
inputNodeDims.data(), inputNodeDims.size());
#ifdef benchmark
clock_t starttime_2 = clock();
#endif
// Run inference
auto outputTensor = session->Run(options, inputNodeNames.data(), &inputTensor, 1,
outputNodeNames.data(), outputNodeNames.size());
#ifdef benchmark
clock_t starttime_3 = clock();
#endif
// Check that the model produced valid output
if (outputTensor.empty()) {
return (char*)"[YOLO_V8]: No output from model";
}
Ort::TypeInfo typeInfo = outputTensor.front().GetTypeInfo();
auto tensor_info = typeInfo.GetTensorTypeAndShapeInfo();
std::vector<int64_t> outputNodeDims = tensor_info.GetShape();
// Decode the output according to the model type
switch (modelType) {
case YOLO_DETECT_V8:
case YOLO_DETECT_V8_HALF: {
// Output decoding for detection models
int signalResultNum = outputNodeDims[1];//84
int strideNum = outputNodeDims[2];//8400
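// A YOLOv8 detection head emits [1, 4 + num_classes, 8400]: four box values
// (cx, cy, w, h) followed by the per-class scores for each of the 8400
// candidate anchors. The transpose below turns this into one row per
// candidate so the scores can be scanned with cv::minMaxLoc.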
std::vector<int> class_ids;
std::vector<float> confidences;
std::vector<cv::Rect> boxes;
cv::Mat rawData;
if (modelType == YOLO_DETECT_V8) {
// FP32
rawData = cv::Mat(signalResultNum, strideNum, CV_32F, outputTensor.front().GetTensorMutableData<T>());
}
else {
// FP16
rawData = cv::Mat(signalResultNum, strideNum, CV_16F, outputTensor.front().GetTensorMutableData<T>());
rawData.convertTo(rawData, CV_32F);
}
// Process the detection results...
rawData = rawData.t();
float* data = (float*)rawData.data;
for (int i = 0; i < strideNum; ++i) {
float* classesScores = data + 4;
cv::Mat scores(1, this->classes.size(), CV_32FC1, classesScores);
cv::Point class_id;
double maxClassScore;
cv::minMaxLoc(scores, 0, &maxClassScore, 0, &class_id);
if (maxClassScore > rectConfidenceThreshold) {
confidences.push_back(maxClassScore);
class_ids.push_back(class_id.x);
float x = data[0];
float y = data[1];
float w = data[2];
float h = data[3];
int left = int((x - 0.5 * w) * resizeScales);
int top = int((y - 0.5 * h) * resizeScales);
int width = int(w * resizeScales);
int height = int(h * resizeScales);
boxes.push_back(cv::Rect(left, top, width, height));
}
data += signalResultNum;
}
std::vector<int> nmsResult;
cv::dnn::NMSBoxes(boxes, confidences, rectConfidenceThreshold, iouThreshold, nmsResult);
for (int i = 0; i < nmsResult.size(); ++i) {
int idx = nmsResult[i];
DL_RESULT result;
result.classId = class_ids[idx];
result.confidence = confidences[idx];
result.box = boxes[idx];
oResult.push_back(result);
}
break;
}
case YOLO_CLS:
case YOLO_CLS_HALF: {
// Output decoding for classification models
cv::Mat rawData;
if (modelType == YOLO_CLS) {
// FP32
rawData = cv::Mat(1, this->classes.size(), CV_32F, outputTensor.front().GetTensorMutableData<T>());
}
else {
// FP16
rawData = cv::Mat(1, this->classes.size(), CV_16F, outputTensor.front().GetTensorMutableData<T>());
rawData.convertTo(rawData, CV_32F);
}
float* data = (float*)rawData.data;
DL_RESULT result;
for (int i = 0; i < this->classes.size(); i++) {
result.classId = i;
result.confidence = data[i];
oResult.push_back(result);
}
break;
}
default:
std::cout << "[YOLO_V8]: " << "Not support model type." << std::endl;
}
#ifdef benchmark
clock_t starttime_4 = clock();
double pre_process_time = (double)(starttime_2 - starttime_1) / CLOCKS_PER_SEC * 1000;
double process_time = (double)(starttime_3 - starttime_2) / CLOCKS_PER_SEC * 1000;
double post_process_time = (double)(starttime_4 - starttime_3) / CLOCKS_PER_SEC * 1000;
// Output the performance timings...
#endif
return RET_OK;
}
template char* YOLO_V8::TensorProcess<float>(clock_t&, cv::Mat&, float*, std::vector<int64_t>&, std::vector<DL_RESULT>&);
#ifdef USE_CUDA
template char* YOLO_V8::TensorProcess<half>(clock_t&, cv::Mat&, half*, std::vector<int64_t>&, std::vector<DL_RESULT>&);
#endif
char* YOLO_V8::WarmUpSession() {
clock_t starttime_1 = clock();
cv::Mat iImg = cv::Mat(cv::Size(imgSize.at(0), imgSize.at(1)), CV_8UC3);
cv::Mat processedImg;
PreProcess(iImg, imgSize, processedImg);
if (modelType < 4) {
std::unique_ptr<float[]> blob(new float[processedImg.total() * 3]);
BlobFromImage(processedImg, blob.get());
std::vector<int64_t> inputNodeDims = { 1, 3, imgSize.at(0), imgSize.at(1) };
Ort::Value input_tensor = Ort::Value::CreateTensor<float>(
Ort::MemoryInfo::CreateCpu(OrtDeviceAllocator, OrtMemTypeCPU),
blob.get(), 3 * imgSize.at(0) * imgSize.at(1),
inputNodeDims.data(), inputNodeDims.size());
auto output_tensors = session->Run(options, inputNodeNames.data(), &input_tensor, 1,
outputNodeNames.data(), outputNodeNames.size());
}
else {
#ifdef USE_CUDA
std::unique_ptr<half[]> blob(new half[processedImg.total() * 3]);
BlobFromImage(processedImg, blob.get());
std::vector<int64_t> inputNodeDims = { 1, 3, imgSize.at(0), imgSize.at(1) };
Ort::Value input_tensor = Ort::Value::CreateTensor<half>(
Ort::MemoryInfo::CreateCpu(OrtDeviceAllocator, OrtMemTypeCPU),
blob.get(), 3 * imgSize.at(0) * imgSize.at(1),
inputNodeDims.data(), inputNodeDims.size());
auto output_tensors = session->Run(options, inputNodeNames.data(), &input_tensor, 1,
outputNodeNames.data(), outputNodeNames.size());
#endif
}
return RET_OK;
}
char* YOLO_V8::WarmUpSession(int shoot)
{
return (char*)"WarmUpSession Undefined";
}

@ -0,0 +1,67 @@
#pragma once
#define RET_OK nullptr
#ifdef _WIN32
#include <Windows.h>
#include <direct.h>
#include <io.h>
#endif
#include <string>
#include <vector>
#include <cstdio>
#include <opencv2/opencv.hpp>
#include <onnxruntime_cxx_api.h>
#include "YOLOManager.h"
#ifdef USE_CUDA
#include <cuda_fp16.h>
#endif
class YOLO_V8 :public YOLO
{
public:
YOLO_V8();
~YOLO_V8();
public:
bool InitParam(DL_INIT_PARAM& iParams)override;
bool CreateSession(std::string model_path, std::string model_name)override;
bool CreateSession(std::string model_path)override;
char* RunSession(cv::Mat& in, cv::Mat& out, std::vector<std::pair<int, cv::Rect> >& results)override;
char* RunSession(std::vector<cv::Mat>& vec_in, std::vector<cv::Mat>& vec_out, std::vector<std::vector<std::pair<int, cv::Rect> > >& vec_results)override;
char* WarmUpSession()override;
char* WarmUpSession(int shoot)override;
template<typename T>
char* TensorProcess(clock_t& starttime_1, cv::Mat& iImg, T* blob,
std::vector<int64_t>& inputNodeDims,
std::vector<DL_RESULT>& oResult);
char* PreProcess(cv::Mat& iImg, std::vector<int> iImgSize, cv::Mat& oImg);
std::vector<std::string> classes{};
private:
Ort::Env env;
Ort::Session* session;
bool cudaEnable;
Ort::RunOptions options;
//std::vector<const char*> inputNodeNames;
//std::vector<const char*> outputNodeNames;
int logSeverityLevel;
int intraOpNumThreads;
MODEL_TYPE modelType;
std::vector<int> imgSize;
float rectConfidenceThreshold;
float iouThreshold;
float resizeScales;//letterbox scale
std::vector<char*> inputNodeNames;
std::vector<char*> outputNodeNames;
};

@ -21,7 +21,7 @@
<PropertyGroup Label="Globals">
<ProjectGuid>{B12702AD-ABFB-343A-A199-8E24837244A3}</ProjectGuid>
<Keyword>QtVS_v304</Keyword>
<WindowsTargetPlatformVersion>10.0.22621.0</WindowsTargetPlatformVersion>
<WindowsTargetPlatformVersion>10.0.26100.0</WindowsTargetPlatformVersion>
<QtMsBuild Condition="'$(QtMsBuild)'=='' or !Exists('$(QtMsBuild)\qt.targets')">$(MSBuildProjectDirectory)\QtMsBuild</QtMsBuild>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
@ -137,7 +137,7 @@
<ClCompile>
<PreprocessorDefinitions>UNICODE;_UNICODE;WIN32;WIN64;NOMINMAX;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>GeneratedFiles\$(ConfigurationName);GeneratedFiles;.\GeneratedFiles;.;.\GeneratedFiles\$(ConfigurationName);G:\code_library\c\opencv\4.3\build-opencv-cpu\include;$(ProjectDir)MvIMPACT;$(ProjectDir)Pylon6.2\include;$(ProjectDir)Common;$(ProjectDir)modbus;$(ProjectDir)MVS3.2.1\Include;$(ProjectDir)PLC;$(ProjectDir)Ui;$(ProjectDir)Thread;$(ProjectDir)3rd\include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<Optimization>MaxSpeed</Optimization>
<Optimization>Disabled</Optimization>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
<RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
<TreatWChar_tAsBuiltInType>true</TreatWChar_tAsBuiltInType>
@ -149,7 +149,7 @@
<SubSystem>Console</SubSystem>
<OutputFile>$(OutDir)\$(ProjectName).exe</OutputFile>
<AdditionalLibraryDirectories>G:\code_library\c\opencv\4.3\build-opencv-cpu\x64\vc15\lib;$(ProjectDir)Pylon6.2\lib\Win64;$(ProjectDir)3rd\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<GenerateDebugInformation>false</GenerateDebugInformation>
<GenerateDebugInformation>true</GenerateDebugInformation>
<AdditionalDependencies>opencv_world430d.lib;modbus.lib;mvDeviceManager.lib;MvCameraControl.lib;Qt5Mqttd.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
<QtMoc>
@ -211,23 +211,23 @@
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
<PreprocessorDefinitions>UNICODE;_UNICODE;WIN32;WIN64;QT_NO_DEBUG;NDEBUG;NOMINMAX;WIN32_LEAN_AND_MEAN;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>GeneratedFiles\$(ConfigurationName);GeneratedFiles;.\GeneratedFiles;.;.\GeneratedFiles\$(ConfigurationName);$(ProjectDir)MvIMPACT;$(ProjectDir)OpenCV455Simple\include;$(ProjectDir)Common;$(ProjectDir)Pylon6.2\include;$(ProjectDir)modbus;$(ProjectDir)MVS3.2.1\Include;$(ProjectDir)PLC;$(ProjectDir)Ui;$(ProjectDir)Thread;$(ProjectDir)3rd\include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<AdditionalIncludeDirectories>GeneratedFiles\$(ConfigurationName);GeneratedFiles;.\GeneratedFiles;.;.\GeneratedFiles\$(ConfigurationName);$(ProjectDir)MvIMPACT;$(ProjectDir)OpenCV455Simple\include;$(ProjectDir)Common;$(ProjectDir)Pylon6.2\include;$(ProjectDir)modbus;$(ProjectDir)MVS3.2.1\Include;$(ProjectDir)PLC;$(ProjectDir)Ui;$(ProjectDir)Thread;$(ProjectDir)3rd\include;$(ProjectDir)Alg;$(ProjectDir)ONNX\ONNX-cpu-1.13.1\include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
<TreatWChar_tAsBuiltInType>true</TreatWChar_tAsBuiltInType>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
<LanguageStandard>stdcpp14</LanguageStandard>
<Optimization>MaxSpeed</Optimization>
<Optimization>Disabled</Optimization>
<SupportJustMyCode>false</SupportJustMyCode>
<DisableSpecificWarnings>4819;%(DisableSpecificWarnings)</DisableSpecificWarnings>
<DisableSpecificWarnings>4819;4566;%(DisableSpecificWarnings)</DisableSpecificWarnings>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
</ClCompile>
<Link>
<SubSystem>Console</SubSystem>
<OutputFile>$(OutDir)\$(ProjectName).exe</OutputFile>
<AdditionalLibraryDirectories>$(ProjectDir)OpenCV455Simple\win64\vc15\lib;$(ProjectDir)Pylon6.2\lib\Win64;$(ProjectDir)MvIMPACT\lib\win64;$(ProjectDir)MVS3.2.1\lib\win64;$(ProjectDir)modbus;$(ProjectDir)3rd\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<GenerateDebugInformation>false</GenerateDebugInformation>
<AdditionalDependencies>opencv_world455.lib;modbus.lib;mvDeviceManager.lib;MvCameraControl.lib;Qt5Mqtt.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalLibraryDirectories>$(ProjectDir)OpenCV455Simple\win64\vc15\lib;$(ProjectDir)Pylon6.2\lib\Win64;$(ProjectDir)MvIMPACT\lib\win64;$(ProjectDir)MVS3.2.1\lib\win64;$(ProjectDir)modbus;$(ProjectDir)3rd\lib;$(ProjectDir)ONNX\ONNX-cpu-1.13.1\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<GenerateDebugInformation>true</GenerateDebugInformation>
<AdditionalDependencies>opencv_world455.lib;modbus.lib;mvDeviceManager.lib;MvCameraControl.lib;Qt5Mqtt.lib;onnxruntime.lib;%(AdditionalDependencies)</AdditionalDependencies>
<IgnoreSpecificDefaultLibraries>
</IgnoreSpecificDefaultLibraries>
</Link>
@ -253,7 +253,9 @@
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="AlarmInfo.cpp" />
<ClCompile Include="alg_jd.cpp" />
<ClCompile Include="Alg\alg_jd_yolo3.cpp" />
<ClCompile Include="Alg\alg_jd_yolo8.cpp" />
<ClCompile Include="Alg\YOLOManager.cpp" />
<ClCompile Include="ASyncQueue.cpp" />
<ClCompile Include="balluffcamera.cpp" />
<ClCompile Include="basecamera.cpp" />
@ -306,6 +308,9 @@
<QtUic Include="Ui\plcsetup.ui" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="Alg\alg_jd_yolo3.h" />
<ClInclude Include="Alg\alg_jd_yolo8.h" />
<ClInclude Include="Alg\YOLOManager.h" />
<ClInclude Include="FtpManager.h" />
<QtMoc Include="Ui\output_statistic.h" />
<QtMoc Include="Ui\change_shift.h" />
@ -330,7 +335,6 @@
<QtMoc Include="Thread\workthread.h" />
<ClInclude Include="tinyxml2.h" />
<ClInclude Include="AlarmInfo.h" />
<ClInclude Include="alg_jd.h" />
<ClInclude Include="ASyncQueue.h" />
<ClInclude Include="balluffcamera.h" />
<ClInclude Include="baslercamera.h" />

@ -45,6 +45,12 @@
<Filter Include="Source Files\Thread">
<UniqueIdentifier>{91e6e8fc-6df2-4732-839b-4f873694fc78}</UniqueIdentifier>
</Filter>
<Filter Include="Source Files\Alg">
<UniqueIdentifier>{65df07cd-a8ba-4d7b-bec8-75d2cc5b744a}</UniqueIdentifier>
</Filter>
<Filter Include="Header Files\Alg">
<UniqueIdentifier>{488e44eb-1d85-49cc-83eb-08e589998bea}</UniqueIdentifier>
</Filter>
</ItemGroup>
<ItemGroup>
<ClCompile Include="main.cpp">
@ -59,9 +65,6 @@
<ClCompile Include="SyncQueue.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="alg_jd.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="balluffcamera.cpp">
<Filter>Source Files</Filter>
</ClCompile>
@ -155,6 +158,15 @@
<ClCompile Include="Thread\threadSendMqtt.cpp">
<Filter>Source Files\Thread</Filter>
</ClCompile>
<ClCompile Include="Alg\alg_jd_yolo3.cpp">
<Filter>Source Files\Alg</Filter>
</ClCompile>
<ClCompile Include="Alg\alg_jd_yolo8.cpp">
<Filter>Source Files\Alg</Filter>
</ClCompile>
<ClCompile Include="Alg\YOLOManager.cpp">
<Filter>Source Files\Alg</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<QtMoc Include="cigarette.h">
@ -234,9 +246,6 @@
<ClInclude Include="SyncQueue.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="alg_jd.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="balluffcamera.h">
<Filter>Header Files</Filter>
</ClInclude>
@ -276,6 +285,15 @@
<ClInclude Include="Thread\threadSendMqtt.h">
<Filter>Header Files\Thread</Filter>
</ClInclude>
<ClInclude Include="Alg\alg_jd_yolo3.h">
<Filter>Header Files\Alg</Filter>
</ClInclude>
<ClInclude Include="Alg\alg_jd_yolo8.h">
<Filter>Header Files\Alg</Filter>
</ClInclude>
<ClInclude Include="Alg\YOLOManager.h">
<Filter>Header Files\Alg</Filter>
</ClInclude>
</ItemGroup>
<ItemGroup>
<QtUic Include="Ui\alarmdialog.ui">

@ -10,8 +10,20 @@
</PropertyGroup>
<PropertyGroup Label="QtSettings" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<QtLastBackgroundBuild>2024-10-04T06:46:34.3746998Z</QtLastBackgroundBuild>
<QtTouchProperty>
</QtTouchProperty>
</PropertyGroup>
<PropertyGroup Label="QtSettings" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<QtLastBackgroundBuild>2024-10-04T06:46:35.5916473Z</QtLastBackgroundBuild>
<QtLastBackgroundBuild>2025-09-08T06:21:32.5149335Z</QtLastBackgroundBuild>
<QtTouchProperty>
</QtTouchProperty>
</PropertyGroup>
<PropertyGroup Label="QtSettings" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<QtTouchProperty>
</QtTouchProperty>
</PropertyGroup>
<PropertyGroup Label="QtSettings" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<QtTouchProperty>
</QtTouchProperty>
</PropertyGroup>
</Project>

@ -234,7 +234,7 @@ CaptureThreadHIK::CaptureThreadHIK(void* camhandle, bool boTerminated, int Num)
//-----------------------------------------------------------------------------
{
p_unit_queue = new ASyncQueue<cv::Mat>(Unit_Queue_Size);
g_pImage_buf = (unsigned char*)malloc(3000 * 3000 * 3);
g_pImage_buf = (unsigned char*)malloc(2*(IMAGE_HEIGHT * IMAGE_WIDTH * 3));//x2 headroom to avoid buffer overrun
}
CaptureThreadHIK::~CaptureThreadHIK()
{

@ -1,5 +1,5 @@
#include "SyncWorkThread.h"
#include "alg_jd.h"
#include "YOLOManager.h"
#include "common.h"
#include "balluffcamera.h"
#include "baslercamera.h"
@ -8,7 +8,6 @@
#include "exportData.h"
#include <QMap>
extern AlgJd alg_jd[NumberOfSupportedCameras]; //AI algorithm for glue-dot detection
extern ConfPath g_conf_path;
extern SysConf g_sys_conf; //system configuration parameters
extern DisplayLabelConf g_display_label_conf[NumberOfSupportedCameras];
@ -70,7 +69,7 @@ void SyncWorkThread::run()
local_g_image_sync_queue->take(element_vec);
bool IsNGForAll = false;
int merge_index;
cv::Mat merge_image = cv::Mat::zeros(512 * work_camera_nums, 640 * g_sys_conf.shoot[0], CV_8UC3);
cv::Mat merge_image = cv::Mat::zeros(IMAGE_HEIGHT * work_camera_nums, IMAGE_WIDTH * g_sys_conf.shoot[0], CV_8UC3);
cv::Rect roi;
int j = 0; // id of the camera actually working; element_vec may contain cameras that are not working
for (int i = 0; i < element_vec.size(); i++)//iterate over each camera's images in turn
@ -120,7 +119,7 @@ void SyncWorkThread::run()
{
cv::cvtColor(image, image, CV_BGR2RGB); //convert the grayscale image to a color image
}
cv::resize(image, image, cv::Size(640, 512 * unit_count));
cv::resize(image, image, cv::Size(IMAGE_WIDTH, IMAGE_HEIGHT * unit_count));
if (local_SysConf.shoot[local_camera_number] == unit_count)
{
std::vector<cv::Mat> vec_in;
@ -145,12 +144,12 @@ void SyncWorkThread::run()
std::vector<std::pair<int, cv::Rect>> results;
cv::Mat imagein, imageout;
imagein = vec_in[0];
alg_jd[local_camera_number].detect(imagein, imageout, results);
YOLOManager::GetInstance().GetDetector(local_camera_number)->RunSession(imagein, imageout, results);
vec_out.push_back(imageout.clone());
vec_results.push_back(results);
}
else {
alg_jd[local_camera_number].detect_batch(vec_in, vec_out, vec_results);
YOLOManager::GetInstance().GetDetector(local_camera_number)->RunSession(vec_in, vec_out, vec_results);
}
QDateTime ts_jd = QDateTime::currentDateTime();
int time_process = ts_start.msecsTo(ts_jd);
@ -204,10 +203,10 @@ void SyncWorkThread::run()
if (merge_index == work_camera_nums * unit_count) {
file_name = g_conf_path.save_pics_path + "/"
+ now_ts.toString("yyyy-MM-dd") + "/"
+ now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + ".jpg";
+ now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + SUFFIX;
g_save_queue->put(std::make_pair(file_name.toLocal8Bit().constData(), merge_image));
#ifdef __TCPSend
QString sendName = now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + ".jpg";
QString sendName = now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + SUFFIX;
TCPSendInfo.pics_name = sendName.toLocal8Bit().constData();
TCP_Info_queue->put(TCPSendInfo);
#endif
@ -279,11 +278,11 @@ void SyncWorkThread::run()
+ QString::number(local_camera_number + 1) + "/" + QString::number(index + 1) + "/" +
now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + QString::number(local_camera_number + 1) +
"#" + "_" + QString::number(index + 1) + "_" + ng_reason_maps[ngReason] +
".jpg";
SUFFIX;
QString remotePath = "/image/ng/" +
now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + QString::number(local_camera_number + 1) +
"#" + "_" + QString::number(index + 1) + "_" + ng_reason_maps[ngReason] +
".jpg";
SUFFIX;
g_save_queue->put(std::make_pair(file_name.toLocal8Bit().constData(), m));
m = vec_out[index].clone();
@ -292,11 +291,11 @@ void SyncWorkThread::run()
+ QString::number(local_camera_number + 1) + "/" + QString::number(index + 1) + "/" +
now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + QString::number(local_camera_number + 1) +
"#" + "_" + QString::number(index + 1) + "_" + ng_reason_maps[ngReason] +
".jpg";
SUFFIX;
remotePath = "/image/ng_result/" +
now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + QString::number(local_camera_number + 1) +
"#" + "_" + QString::number(index + 1) + "_" + ng_reason_maps[ngReason] +
".jpg";
SUFFIX;
//g_save_queue->put(std::make_pair(file_name.toStdString(), m));
g_save_queue->put(std::make_pair(file_name.toLocal8Bit().constData(), m));
}

@ -1,6 +1,8 @@
#include "threadReceive.h"
#include "cigarette.h"
#include "basecamera.h"
#include <fstream>
void threadReceive::start_work()
{
//start(HighestPriority);

@ -1,5 +1,5 @@
#include "workthread.h"
#include "alg_jd.h"
#include "YOLOManager.h"
#include "common.h"
#include "balluffcamera.h"
#include "baslercamera.h"
@ -8,7 +8,6 @@
#include "exportData.h"
#include <QMap>
extern AlgJd alg_jd[NumberOfSupportedCameras]; //AI algorithm for glue-dot detection
extern ConfPath g_conf_path;
extern SysConf g_sys_conf; //system configuration parameters
extern DisplayLabelConf g_display_label_conf[NumberOfSupportedCameras];
@ -128,12 +127,12 @@ void WorkThread::run()
std::vector<std::pair<int, cv::Rect> > results;
cv::Mat imagein, imageout;
imagein = vec_in[0];
alg_jd[local_camera_number].detect(imagein, imageout, results);
YOLOManager::GetInstance().GetDetector(local_camera_number)->RunSession(imagein, imageout, results);
vec_out.push_back(imageout.clone());
vec_results.push_back(results);
}
else {
alg_jd[local_camera_number].detect_batch(vec_in, vec_out, vec_results);
YOLOManager::GetInstance().GetDetector(local_camera_number)->RunSession(vec_in, vec_out, vec_results);
}
QDateTime ts_jd = QDateTime::currentDateTime();
int time_process = ts_start.msecsTo(ts_jd);
@ -185,12 +184,12 @@ void WorkThread::run()
now_ts.toString("yyyy-MM-dd") + "/"
+ QString::number(local_camera_number + 1) + "/" + QString::number(index + 1) + "/" +
now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + QString::number(local_camera_number + 1) +
"#" + "_" + QString::number(index + 1) + ".jpg";
"#" + "_" + QString::number(index + 1) + SUFFIX;
std::string filename = file_name.toLocal8Bit().constData();
g_save_queue->put(std::make_pair(filename, m));
QString sendName = now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + QString::number(local_camera_number + 1) +
"#" + "_" + QString::number(index + 1) + ".jpg";
"#" + "_" + QString::number(index + 1) + SUFFIX;
#ifdef __TCPSend
TCPSendInfo.pics_name = sendName.toLocal8Bit().constData();
TCP_Info_queue->put(TCPSendInfo);
@ -262,11 +261,11 @@ void WorkThread::run()
+ QString::number(local_camera_number + 1) + "/" + QString::number(index + 1) + "/" +
now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + QString::number(local_camera_number + 1) +
"#" + "_" + QString::number(index + 1) + "_" + ng_reason_maps[ngReason] +
".jpg";
SUFFIX;
QString remotePath = "/image/ng/" +
now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + QString::number(local_camera_number + 1) +
"#" + "_" + QString::number(index + 1) + "_" + ng_reason_maps[ngReason] +
".jpg";
SUFFIX;
g_save_queue->put(std::make_pair(file_name.toLocal8Bit().constData(), m));
m = vec_out[index].clone();
@ -275,11 +274,11 @@ void WorkThread::run()
+ QString::number(local_camera_number + 1) + "/" + QString::number(index + 1) + "/" +
now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + QString::number(local_camera_number + 1) +
"#" + "_" + QString::number(index + 1) + "_" + ng_reason_maps[ngReason] +
".jpg";
SUFFIX;
remotePath = "/image/ng_result/" +
now_ts.toString("yyyy-MM-dd_HH-mm-ss_zzz_") + QString::number(local_camera_number + 1) +
"#" + "_" + QString::number(index + 1) + "_" + ng_reason_maps[ngReason] +
".jpg";
SUFFIX;
//g_save_queue->put(std::make_pair(file_name.toStdString(), m));
g_save_queue->put(std::make_pair(file_name.toLocal8Bit().constData(), m));
}

@ -1,517 +0,0 @@
#include "alg_jd_ng.h"
#include <direct.h> // required library header
extern SysConf g_sys_conf;
// Initialize the parameters
static float confThreshold = 0.01; // Confidence threshold
static float nmsThreshold = 0.4; // Non-maximum suppression threshold
static int inpWidth = 416; // Width of network's input image
static int inpHeight = 416; // Height of network's input image
static std::vector<std::string> classes;
bool AlgJd_ng::init_ng(QString model_path, QString model_name)
{
// Load names of classes
std::string classesFile;
cv::String modelConfiguration;
cv::String modelWeights;
QString image_path;
modelWeights = model_path.toStdString() + "/" + model_name.toStdString();
modelConfiguration = model_path.toStdString() + "/jd_ng.cfg";
classesFile = model_path.toStdString() + "/jd_ng.names";
image_path = model_path + "/" + "alg_jd_ng.bmp";
std::ifstream classNamesFile(classesFile.c_str());
if (classNamesFile.is_open())
{
std::string className = "";
while (std::getline(classNamesFile, className))
classes.push_back(className);
}
else{
return false;
}
// Load the network
net_ng = cv::dnn::readNetFromDarknet(modelConfiguration, modelWeights);
net_ng.setPreferableBackend(cv::dnn::DNN_BACKEND_CUDA);
net_ng.setPreferableTarget(cv::dnn::DNN_TARGET_CUDA);
//cv::Mat image = cv::imread("alg_jd.bmp");
cv::Mat image = cv::imread(image_path.toStdString());
//Run one test image to verify the model is correct and finish loading data onto the GPU
if (!image.data) return false; //check that the test image was read correctly
std::vector<std::pair<int, cv::Rect> > results;
detect_ng(image, image, results);
if (results.size() > 0)
return true; //targets detected, so initialization succeeded
else
return false; //otherwise initialization failed
}
bool AlgJd_ng::test_detect_ng()
{
cv::Mat m1;
m1 = cv::Mat(544, 728, CV_8UC3, cv::Scalar(0, 0, 0));
std::vector<std::pair<int, cv::Rect> > results;
double t = (double)cv::getTickCount();
detect_ng(m1, m1, results);
t = ((double)cv::getTickCount() - t) / cv::getTickFrequency();
DEBUG(" test_detect time process:%f\n", t);
return true;
}
bool AlgJd_ng::test_detect_batcht_ng(int shoot)
{
std::vector<cv::Mat> vec_in;
std::vector<cv::Mat> vec_out;
std::vector<std::vector<std::pair<int, cv::Rect> > > vec_results;
cv::Mat m1, m2, m3;
m1 = cv::Mat(544, 728, CV_8UC3, cv::Scalar(0, 0, 0));
m2 = cv::Mat(544, 728, CV_8UC3, cv::Scalar(0, 0, 0));
m3 = cv::Mat(544, 728, CV_8UC3, cv::Scalar(0, 0, 0));
double t = (double)cv::getTickCount();
switch(shoot){
case 1:{
std::vector<std::pair<int, cv::Rect> > results;
detect_ng(m1, m1, results);
break;
}
case 3:vec_in.push_back(m3);
case 2:vec_in.push_back(m2);
default:{
vec_in.push_back(m1);
detect_batch_ng(vec_in, vec_out, vec_results);
}
}
t = ((double)cv::getTickCount() - t) / cv::getTickFrequency();
DEBUG(" test_detect_batcht time process:%f\n", t);
return true;
}
int AlgJd_ng::detect_ng(cv::Mat& _frame, cv::Mat &out, std::vector<std::pair<int, cv::Rect> > &results)
{
cv::Mat frame = _frame.clone();
cv::Mat image_clone=frame.clone();
// Process frames.
// Create a 4D blob from a frame.
cv::Mat blob;
cv::dnn::blobFromImage(frame, blob, 1/255.0, cv::Size(inpWidth, inpHeight), cv::Scalar(0,0,0), true, false);
//Sets the input to the network
net_ng.setInput(blob);
// Runs the forward pass to get output of the output layers
std::vector<cv::Mat> outs;
net_ng.forward(outs, getOutputsNames_ng(net_ng));
// Remove the bounding boxes with low confidence
post_process_ng(frame, outs, results);
// Write the frame with the detection boxes
cv::Mat detectedFrame;
frame.convertTo(detectedFrame, CV_8U);
//show detectedFrame
out=detectedFrame.clone();
return results.size();
}
int AlgJd_ng::detect_ng(cv::Mat& _frame, cv::Mat& draw_frame, cv::Mat& out, std::vector<std::pair<int, cv::Rect>>& results)
{
cv::Mat frame = _frame.clone();
// Process frames.
// Create a 4D blob from a frame.
cv::Mat blob;
cv::dnn::blobFromImage(frame, blob, 1 / 255.0, cv::Size(inpWidth, inpHeight), cv::Scalar(0, 0, 0), true, false);
//Sets the input to the network
net_ng.setInput(blob);
// Runs the forward pass to get output of the output layers
std::vector<cv::Mat> outs;
net_ng.forward(outs, getOutputsNames_ng(net_ng));
// Remove the bounding boxes with low confidence
post_process_ng(draw_frame, outs, results);
// Write the frame with the detection boxes
cv::Mat detectedFrame;
draw_frame.convertTo(detectedFrame, CV_8U);
//show detectedFrame
out = detectedFrame.clone();
return results.size();
}
void AlgJd_ng::CircleImagePro_ng(cv::Mat src, cv::Mat dst, std::vector<float> radius) {
QStringList TestData;
src.copyTo(dst);
float c = float(20.5) / float(68.9);
std::vector<std::vector<cv::Point>> contours;
// Image preprocessing
cv::Mat img;
cv::cvtColor(dst, img, cv::COLOR_BGR2GRAY);
GaussianBlur(img, img, cv::Size(3, 3), 1);
Canny(img, img, 50, 150, 3);
imshow("img", img);
cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3)); // get the structuring element
dilate(img, img, kernel, cv::Point(-1, -1), 2);
findContours(img, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);
img.release();
std::vector<cv::Point2f> center(contours.size());
std::vector<float> rad(contours.size());
if (contours.size() == 0) {
std::cout << "未找到检测物" << std::endl;
return;
}
for (size_t i = 0; i < contours.size(); ++i) {
minEnclosingCircle(contours[i], center[i], rad[i]);
radius.push_back(rad[i] * c);
}
for (size_t j = 0; j < center.size(); ++j) {
std::string str = "radius: ";
std::string str1;
float r = 0.0;
circle(dst, center[j], rad[j], cv::Scalar(0, 0, 255), 1);
r = rad[j] * c;
str1 = cv::format("%.4f", r);
str.insert(str.end(), str1.begin(), str1.end());
TestData.append(QString::fromStdString(str));
cv::imshow("dst", dst);
cv::putText(dst, str, center[j], 1, 2, cv::Scalar(0, 255, 0), 2, 8);
cv::waitKey(1);
}
TestData.clear();
}
void AlgJd_ng::analyse_ng(cv::Mat vec_in, std::vector<std::pair<int, cv::Rect>>& vec_results)
{
bool IsNG = false;
std::vector<cv::Rect> vec_jd_results; // 0: glue dots
std::vector<cv::Rect> vec_kz_results; // 1: card-paper distance
for (int i = 0; i < vec_results.size(); i++)
{
if (vec_results[i].first == 0) // jd
{
vec_jd_results.push_back(vec_results[i].second);
}
else if (vec_results[i].first == 1) // card-paper distance
{
vec_kz_results.push_back(vec_results[i].second);
}
}
std::sort(vec_jd_results.begin(), vec_jd_results.end(), sort_rect_by_y_center_ng);//ascending order,//jinhuan+
//jinhuan+
if (vec_jd_results.size() && vec_kz_results.size())
{
//center y of the first (topmost) glue dot
int top_jd_y = vec_jd_results[0].y + vec_jd_results[0].height / 2;
//center y of the last (bottommost) glue dot
int bottom_jd_y = vec_jd_results[vec_jd_results.size()-1].y + vec_jd_results[vec_jd_results.size()-1].height / 2;
//y of the card-paper top edge
int top_kz_y = vec_kz_results[0].tl().y;
//y of the card-paper bottom edge
int bottom_kz_y = vec_kz_results[0].br().y;
//if(abs(top_jd_y- top_kz_y)>50);//50 pixels
//if (abs(bottom_kz_y - bottom_jd_y)>40);//40 pixels
} else {
IsNG = true;//not enough card-paper or glue-dot detections
}
//if (vec_kz_results.size() != 1) {//card paper has wrinkles
// IsNG = true;
//}
//jinhuan-
cv::waitKey(1);
}
// Get the names of the output layers
std::vector<cv::String> AlgJd_ng::getOutputsNames_ng(const cv::dnn::Net& net)
{
std::vector<cv::String> names;
if (names.empty())
{
//Get the indices of the output layers, i.e. the layers with unconnected outputs
std::vector<int> outLayers = net.getUnconnectedOutLayers();
//get the names of all the layers in the network
std::vector<cv::String> layersNames = net.getLayerNames();
// Get the names of the output layers in names
names.resize(outLayers.size());
for (size_t i = 0; i < outLayers.size(); ++i)
names[i] = layersNames[outLayers[i] - 1];
}
return names;
}
void AlgJd_ng::post_process_ng(cv::Mat& frame, std::vector<cv::Mat>& outs, std::vector<std::pair<int, cv::Rect> > &results)
{
std::vector < std::vector<int>> classIds(classes.size());
std::vector < std::vector<float>> confidences(classes.size());
std::vector < std::vector<cv::Rect>> boxes(classes.size());
for (size_t i = 0; i < outs.size(); ++i)
{
// Scan through all the bounding boxes output from the network and keep only the
// ones with high confidence scores. Assign the box's class label as the class
// with the highest score for the box.
// Get the data for each output
float* data = (float*)outs[i].data;
for (int j = 0; j < outs[i].rows; ++j, data += outs[i].cols)
{
// Get all class scores for each output row
cv::Mat scores = outs[i].row(j).colRange(5, outs[i].cols);
cv::Point classIdPoint;
double confidence;
// Get the value and location of the maximum score
cv::minMaxLoc(scores, 0, &confidence, 0, &classIdPoint);
//if (confidence > g_sys_conf.ConfThresholds[classIdPoint.x]*0.0001)
if (confidence > confThreshold)
{
int centerX = (int)(data[0] * frame.cols);
int centerY = (int)(data[1] * frame.rows);
int width = (int)(data[2] * frame.cols);
int height = (int)(data[3] * frame.rows);
int left = centerX - width / 2;
int top = centerY - height / 2;
classIds[classIdPoint.x].push_back(classIdPoint.x);
confidences[classIdPoint.x].push_back((float)confidence);
boxes[classIdPoint.x].push_back(cv::Rect(left, top, width, height));
}
}
}
// Perform non maximum suppression to eliminate redundant overlapping boxes with
// lower confidences
for (size_t i = 0; i < classes.size(); ++i)
{
std::vector<int> indices;
cv::dnn::NMSBoxes(boxes[i], confidences[i], confThreshold, nmsThreshold, indices);
for (size_t j = 0; j < indices.size(); ++j)
{
int idx = indices[j];
cv::Rect box = boxes[i][idx];
//if (classIds[i][idx] == 2) continue;
//else if (classIds[i][idx] == 3) continue;
drawPred_ng(classIds[i][idx], confidences[i][idx], box.x, box.y,
box.x + box.width, box.y + box.height, frame);
results.push_back(std::make_pair(classIds[i][idx], box));
}
}
}
// Draw the predicted bounding box
void AlgJd_ng::drawPred_ng(int classId, float conf, int left, int top, int right, int bottom, cv::Mat& frame)
{
cv::Scalar color;
if (classId == 0) //jd
color = cv::Scalar(0, 0, 255); // red (BGR)
else if (classId == 1) // kz
color = cv::Scalar(0, 255, 0); // green
else if (classId == 2) // bm
color = cv::Scalar(0, 255, 0);
else if (classId == 3) // kzx
color = cv::Scalar(0, 255, 0);
else
color = cv::Scalar(255, 255, 255); // white
//Draw a rectangle displaying the bounding box
cv::rectangle(frame, cv::Point(left, top), cv::Point(right, bottom), color, 4);
//Get the label for the class name and its confidence
std::string label = cv::format("%.2f%%", (conf*100));///
if (!classes.empty())
{
CV_Assert(classId < (int)classes.size());
label = classes[classId] + ": " + label;
}
else
{
std::cout<<"classes is empty..."<<std::endl;
}
//Display the label at the top of the bounding box
int baseLine;
cv::Size labelSize = cv::getTextSize(label, cv::FONT_HERSHEY_SIMPLEX, 1, 1, &baseLine);
top = std::max(top, labelSize.height);
cv::putText(frame, label, cv::Point(left, top-5), cv::FONT_HERSHEY_SIMPLEX, 0.8, color, 1);
}
void AlgJd_ng::detect_batch_ng(std::vector<cv::Mat>& vec_in, std::vector<cv::Mat> &vec_out, std::vector<std::vector<std::pair<int, cv::Rect>>> &vec_results)
{
cv::Mat blobs;
std::vector<cv::Mat> image;
cv::dnn::blobFromImages(vec_in, blobs, 1 / 255.0, cv::Size(inpWidth, inpHeight), cv::Scalar(0, 0, 0), true, false);
//Sets the input to the network
net_ng.setInput(blobs);
// Runs the forward pass to get output of the output layers
std::vector<cv::Mat> outs;
net_ng.forward(outs, getOutputsNames_ng(net_ng));
for (int i = 0; i < vec_in.size(); i++)
{
image.push_back(vec_in[i].clone());
}
// Remove the bounding boxes with low confidence
post_process_batch_ng(image, outs, vec_results);
// Write the frame with the detection boxes
for (int i = 0; i < vec_in.size(); i++)
{
cv::Mat detectedFrame, out;
image[i].convertTo(detectedFrame, CV_8U);
out = detectedFrame.clone();
vec_out.push_back(out);
}
}
void AlgJd_ng::detect_batch_ng(std::vector<cv::Mat>& vec_in, std::vector<cv::Mat>& transits, std::vector<cv::Mat>& vec_out, std::vector<std::vector<std::pair<int, cv::Rect> > >& vec_results)
{
cv::Mat blobs;
//std::vector<cv::Mat> image= transits;
cv::dnn::blobFromImages(vec_in, blobs, 1 / 255.0, cv::Size(inpWidth, inpHeight), cv::Scalar(0, 0, 0), true, false);
//Sets the input to the network
net_ng.setInput(blobs);
// Runs the forward pass to get output of the output layers
std::vector<cv::Mat> outs;
net_ng.forward(outs, getOutputsNames_ng(net_ng));
//for (int i = 0; i < vec_in.size(); i++)
//{
// //image.push_back(vec_in[i].clone());
// transits.push_back(vec_in[i].clone());
//}
// Remove the bounding boxes with low confidence
//post_process_batch_ng(image, outs, vec_results);
post_process_batch_ng(transits, outs, vec_results);
// Write the frame with the detection boxes
for (int i = 0; i < vec_in.size(); i++)
{
cv::Mat detectedFrame, out;
//image[i].convertTo(detectedFrame, CV_8U);
transits[i].convertTo(detectedFrame, CV_8U);
out = detectedFrame.clone();
vec_out.push_back(out);
}
}
void AlgJd_ng::post_process_batch_ng(std::vector<cv::Mat>& vec_frame, std::vector<cv::Mat>& outs, std::vector<std::vector<std::pair<int, cv::Rect> > > &vec_results)
{
int batch = vec_frame.size();
double confidence;///
for (int k = 0; k < batch; k++)
{
std::vector < std::vector<int>> classIds(classes.size());
std::vector < std::vector<float>> confidences(classes.size());
std::vector < std::vector<cv::Rect>> boxes(classes.size());
//std::cout << "outs.size()\t" << outs.size() << std::endl;
//std::cout << "Type\t" << outs[0].type() << std::endl;
for (size_t i = 0; i < outs.size(); ++i)
{
//std::cout << "outs.dims\t" << outs[i].dims << std::endl;
//std::cout << "outs[" << i << "].rows\t" << outs[i].size[0] << std::endl;
//std::cout << "outs[" << i << "].cols\t" << outs[i].size[1] << std::endl;
//std::cout << "outs[" << i << "].cols\t" << outs[i].size[2] << std::endl;
// Scan through all the bounding boxes output from the network and keep only the
// ones with high confidence scores. Assign the box's class label as the class
//
cv::Mat m0(outs[i].size[1], outs[i].size[2], CV_32F, outs[i].data + outs[i].step[0] * k);
// with the highest score for the box.
float* data = (float*)m0.data;
for (int j = 0; j < m0.rows; ++j, data += m0.cols)
{
cv::Mat scores = m0.row(j).colRange(5, m0.cols);
cv::Point classIdPoint;
//double confidence;
// Get the value and location of the maximum score
cv::minMaxLoc(scores, 0, &confidence, 0, &classIdPoint);
				// (per-class thresholds via g_sys_conf.ConfThresholds could be used here instead)
				if (confidence > confThreshold)
{
int centerX = (int)(data[0] * vec_frame[k].cols);
int centerY = (int)(data[1] * vec_frame[k].rows);
int width = (int)(data[2] * vec_frame[k].cols);
int height = (int)(data[3] * vec_frame[k].rows);
int left = centerX - width / 2;
int top = centerY - height / 2;
classIds[classIdPoint.x].push_back(classIdPoint.x);
confidences[classIdPoint.x].push_back((float)confidence);
boxes[classIdPoint.x].push_back(cv::Rect(left, top, width, height));
}
}
}
std::vector<std::pair<int, cv::Rect> > results;
// Perform non maximum suppression to eliminate redundant overlapping boxes with
// lower confidences
for (size_t i = 0; i < classes.size(); ++i)
{
std::vector<int> indices;
cv::dnn::NMSBoxes(boxes[i], confidences[i], confThreshold, nmsThreshold, indices);
for (size_t j = 0; j < indices.size(); ++j)
{
int idx = indices[j];
cv::Rect box = boxes[i][idx];
drawPred_ng(classIds[i][idx], confidences[i][idx], box.x, box.y,
box.x + box.width, box.y + box.height, vec_frame[k]);
results.push_back(std::make_pair(classIds[i][idx], box));
}
}
vec_results.push_back(results);
}
}
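// Worked example of the box decode above (numbers are illustrative):
// for a 640x512 frame and a network row (cx, cy, w, h) = (0.5, 0.5, 0.25, 0.5),
//   centerX = 0.5  * 640 = 320    centerY = 0.5 * 512 = 256
//   width   = 0.25 * 640 = 160    height  = 0.5 * 512 = 256
//   left    = 320 - 160/2 = 240   top     = 256 - 256/2 = 128
// yielding cv::Rect(240, 128, 160, 256) as the candidate box before NMS.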
//jinhuan+
bool sort_rect_by_x_center_ng(cv::Rect r1, cv::Rect r2)
{
return (r1.x + r1.width / 2) < (r2.x + r2.width / 2);
}
bool sort_rect_by_y_center_ng(cv::Rect r1, cv::Rect r2)
{
return (r1.y + r1.height / 2) < (r2.y + r2.height / 2);
}
bool sort_rect_by_height_ng(cv::Rect r1, cv::Rect r2)
{
return (r1.height) < (r2.height);
}
bool sort_rect_by_width_ng(cv::Rect r1, cv::Rect r2)
{
return (r1.width) < (r2.width);
}
//jinhuan-
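// Usage sketch for the comparators above (illustrative only; requires <algorithm>):
// order detections left-to-right by horizontal center, e.g. before matching
// boxes to fixed positions on the line.
static void example_sort_rects_ng()
{
	std::vector<cv::Rect> boxes = { cv::Rect(100, 0, 50, 50), cv::Rect(10, 0, 50, 50) };
	std::sort(boxes.begin(), boxes.end(), sort_rect_by_x_center_ng);
	// boxes[0] is now the leftmost rectangle (x center 35 < 125)
}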

@ -1,42 +0,0 @@
#ifndef _CIGARETTE_JD_ng
#define _CIGARETTE_JD_ng
#include <opencv2/opencv.hpp>
#include <opencv2/dnn.hpp>
#include <opencv2/dnn/shape_utils.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
#include <fstream>
#include "common.h"
class AlgJd_ng
{
public:
bool init_ng(QString model_path, QString model_name);
bool test_detect_ng();
bool test_detect_batcht_ng(int shoot);
int detect_ng(cv::Mat& in, cv::Mat &draw_frame, cv::Mat &out, std::vector<std::pair<int, cv::Rect>> &results);
int detect_ng(cv::Mat& in, cv::Mat &out, std::vector<std::pair<int, cv::Rect> > &results);
// Remove the bounding boxes with low confidence using non-maxima suppression
void post_process_ng(cv::Mat& frame, std::vector<cv::Mat>& outs, std::vector<std::pair<int, cv::Rect>> &results);
void CircleImagePro_ng(cv::Mat src, cv::Mat dst, std::vector<float> radius);
void detect_batch_ng(std::vector<cv::Mat>& vec_in, std::vector<cv::Mat> &vec_out, std::vector<std::vector<std::pair<int, cv::Rect> > > &vec_results);
void detect_batch_ng(std::vector<cv::Mat>& vec_in, std::vector<cv::Mat> &transits, std::vector<cv::Mat> &vec_out, std::vector<std::vector<std::pair<int, cv::Rect>>> &vec_results);
// Remove the bounding boxes with low confidence using non-maxima suppression
void post_process_batch_ng(std::vector<cv::Mat>& vec_frame, std::vector<cv::Mat>& outs, std::vector<std::vector<std::pair<int, cv::Rect>>> &vec_results);
void analyse_ng(cv::Mat vec_in, std::vector<std::pair<int, cv::Rect> > & vec_results);
// Get the names of the output layers
std::vector<cv::String> getOutputsNames_ng(const cv::dnn::Net& net);
// Draw the predicted bounding box
void drawPred_ng(int classId, float conf, int left, int top, int right, int bottom, cv::Mat& frame);
private:
cv::dnn::Net net_ng;
std::vector<std::string> classes_ng;
};
//jinhuan+
bool sort_rect_by_x_center_ng(cv::Rect r1, cv::Rect r2);
bool sort_rect_by_y_center_ng(cv::Rect r1, cv::Rect r2);
bool sort_rect_by_height_ng(cv::Rect r1, cv::Rect r2);
bool sort_rect_by_width_ng(cv::Rect r1, cv::Rect r2);
//jinhuan-
#endif // end of _CIGARETTE_JD_ng

@ -8,6 +8,7 @@
#include <QtCore\qprocess.h>
#include "exportData.h"
#include <Windows.h>
#include <fstream>
int g_op_time;		// operator permission duration, default 300 seconds
int g_admin_time;	// administrator permission duration, default 300 seconds
@ -28,10 +29,6 @@ QDateTime g_ts_start;
extern SingleCamInfoStruct SingleCamInfo[NumberOfSupportedCameras];
AlgJd alg_jd[NumberOfSupportedCameras];	// AI algorithm that detects glue dots
#ifdef __DEBUG
AlgJd alg_test;	// test AI algorithm
#endif
QThread* pThread[NumberOfSupportedCameras];
int work_camera_nums;
@ -105,7 +102,7 @@ VOID BeforeWork(int shoot[])
for (int i = 0; i < NumberOfSupportedCameras; i++)
{
if (SingleCamInfo[i].Detect && SingleCamInfo[i].IsOpen) {
alg_jd[i].test_detect_batcht(shoot[i]);
YOLOManager::GetInstance().GetDetector(i)->WarmUpSession(shoot[i]);
}
}
}
@ -253,6 +250,14 @@ Cigarette::Cigarette(QWidget* parent)
QString config_path = g_conf_path.config_path + "/conf.txt";
	read_sys_config(g_sys_conf, config_path);	// initialize the system configuration
if (g_sys_conf.model_path.isEmpty()) {
g_sys_conf.model_path = "D:/model";
}
if (g_sys_conf.model_name.isEmpty()) {
g_sys_conf.model_name = "jd.weights";
}
for (int i = 0; i < NumberOfSupportedCameras; i++) {
#if defined(NumberOfSupportedCameras) && (NumberOfSupportedCameras > 0)	// note: the preprocessor cannot see the loop variable i (it expands to 0), so guard the count itself
g_modbus_conf.kick[i] = 0;
@ -340,28 +345,18 @@ Cigarette::Cigarette(QWidget* parent)
CreatWorkThread(0, 0, this);
#endif
#ifdef __DEBUG
	YOLOManager::GetInstance().InitializeDetector(g_sys_conf, NumberOfSupportedCameras);
#endif
	// create the camera worker threads
for (int i = 0; i < NumberOfSupportedCameras; i++)
{
if (SingleCamInfo[i].Detect) {
work_camera_nums++;
cam_status_mat[i]->setStyleSheet(tr("background-color: rgb(0, 170, 0);"));
QString model_path, model_name;
if (g_sys_conf.model_path.isEmpty()) {
model_path = "D:/model";
g_sys_conf.model_path = "D:/model";
}
else
model_path = g_sys_conf.model_path;
if (g_sys_conf.model_name.isEmpty()) {
model_name = "jd.weights";
g_sys_conf.model_name = "jd.weights";
}
else
model_name = g_sys_conf.model_name;
if (!alg_jd[i].init(model_path, model_name))
			if (!YOLOManager::GetInstance().InitializeDetector(g_sys_conf, i))
{
QMessageBox::information(NULL, QStringLiteral("系统自检失败"), QStringLiteral("AI模型1初始化失败请检查程序完整性"), QMessageBox::Ok);
exit(-1);
@ -417,10 +412,6 @@ Cigarette::Cigarette(QWidget* parent)
}
#ifdef __DEBUG
	alg_test.init(g_sys_conf.model_path, g_sys_conf.model_name);	// test AI algorithm
#endif
	// automatically open all cameras
if (g_sys_conf.auto_open == 1)
{
@ -856,14 +847,14 @@ void Cigarette::TestImg()
}
std::vector<std::pair<int, cv::Rect> > results;
cv::Mat output;
alg_test.detect(imagein, output, results);
YOLOManager::GetInstance().GetDetector(NumberOfSupportedCameras)->RunSession(imagein, output, results);
std::string WindowName = "TestImg";
cv::namedWindow(WindowName, cv::WINDOW_NORMAL);
cv::imshow(WindowName, output);
cv::waitKeyEx(1);
#ifdef __ExportData
alg_test.analyse(imagein, results);
#endif
// #ifdef __ExportData
// alg_test.analyse(imagein, results);
// #endif
}
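	// Call-flow sketch (illustrative): the replacement above routes single-image
	// tests through the YOLOManager singleton instead of the removed alg_test.
	// The slot at index NumberOfSupportedCameras is the debug-only test detector
	// initialized in the constructor; a null check is prudent since GetDetector()
	// returns a raw pointer:
	//
	//   YOLO* det = YOLOManager::GetInstance().GetDetector(NumberOfSupportedCameras);
	//   if (det) {
	//       cv::Mat output;
	//       std::vector<std::pair<int, cv::Rect>> results;
	//       det->RunSession(imagein, output, results);  // dispatches to YOLOv3 or v8
	//   }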
void Cigarette::TestImgs()
@ -893,15 +884,15 @@ void Cigarette::TestImgs()
cv::Mat output;
std::vector<std::pair<int, cv::Rect> > results;
alg_test.detect(imagein, output, results);
YOLOManager::GetInstance().GetDetector(NumberOfSupportedCameras)->RunSession(imagein, output, results);
std::string WindowName = "TestImg";
cv::namedWindow(WindowName, cv::WINDOW_NORMAL);
cv::imshow(WindowName, output);
int k = cv::waitKeyEx(1);
		if (k == 27) break;	// ESC key
#ifdef __ExportData
alg_test.analyse(imagein, results);
#endif
// #ifdef __ExportData
// alg_test.analyse(imagein, results);
// #endif
QCoreApplication::processEvents();
}
}
@ -2983,6 +2974,8 @@ bool Cigarette::ControlCamOpenOrClose(int Num, bool OpenOrClose)
ifc.binningHorizontal.write(2);
ifc.binningVerticalMode.writeS("Sum");
ifc.binningVertical.write(2);
ifc.height.write(IMAGE_HEIGHT);
ifc.width.write(IMAGE_WIDTH);
pFI[Num] = new FunctionInterface(pDev);
if (pDev->interfaceLayout.isValid() && (pDev->interfaceLayout.read() == dilGenICam))
@ -3120,6 +3113,9 @@ bool Cigarette::ControlCamOpenOrClose(int Num, bool OpenOrClose)
baslerCamera->BinningHorizontal.SetValue(2);
baslerCamera->BinningVertical.SetValue(2);
baslerCamera->Height.SetValue(IMAGE_HEIGHT);
baslerCamera->Width.SetValue(IMAGE_WIDTH);
pBaslerCaptureThread[Num] = new CaptureThreadBasler(baslerCamera, false, Num, g_sys_conf.shoot[Num]);
#ifdef SYNC_CAMERA
pBaslerCaptureThread[Num]->p_image_sync_queue = g_image_sync_queue;
@ -3251,6 +3247,12 @@ bool Cigarette::ControlCamOpenOrClose(int Num, bool OpenOrClose)
nRet = MV_CC_SetFloatValue(camhandle, "Gain", g_sys_conf.gain[Num]);
if (nRet) { std::cout << "can not set Gain" << std::endl; nnRet = nRet; }
nRet = MV_CC_SetHeight(camhandle, IMAGE_HEIGHT);
if (nRet) { std::cout << "can not MV_CC_SetHeight" << std::endl; nnRet = nRet; }
nRet = MV_CC_SetWidth(camhandle, IMAGE_WIDTH);
if (nRet) { std::cout << "can not MV_CC_SetWidth" << std::endl; nnRet = nRet; }
pHIKCaptureThread[Num] = new CaptureThreadHIK(camhandle, false, Num);
#ifdef SYNC_CAMERA
pHIKCaptureThread[Num]->p_image_sync_queue = g_image_sync_queue;

@ -6,7 +6,7 @@
#include "dialogin.hpp"
#include "dialogsetup.hpp"
#include "plcsetup.hpp"
#include "alg_jd.h"
#include "YOLOManager.h"
#include "balluffcamera.h"
#include "baslercamera.h"
#include "hikcamera.h"

@ -5,6 +5,7 @@
#include "basecamera.h"
#include "QtCore\qdatetime.h"
#define USE_CUDA
//#define __DEBUG		// debug info output
#define LOG_RECORD		// logging to file
#ifdef LOG_RECORD
@ -64,11 +65,15 @@
#define STATISTIC_FILE "camera%1_statistic.txt"
#define ALARM_RECORD_FILE "alarm.txt"
#define OUTPUT_HIGH_WIDTH 20000	// output signal pulse width, in microseconds
#define SUFFIX ".jpg"
#define OP_TIME 300		// operator permission duration, default 300 seconds
#define ADMIN_TIME 600	// administrator permission duration, default 600 seconds
#define STOP_SECONDS 3	// number of consecutive unchanged checks that triggers an automatic shift change
#define IMAGE_HEIGHT 512
#define IMAGE_WIDTH 640
int string_split(std::string str, std::string pattern, std::vector<std::string>& out);
std::string format(const char* pszFmt, ...);
void getFiles(std::string path, std::vector<std::string>& files);
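// Usage sketch for the helpers above (semantics assumed from the signatures;
// the return value is presumably the number of extracted fields):
//
//   std::vector<std::string> fields;
//   int n = string_split("camera1_statistic.txt", "_", fields);
//   // expected: fields == {"camera1", "statistic.txt"}, n == 2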

@ -3,8 +3,12 @@
<PropertyGroup />
<PropertyGroup Label="QtSettings" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<QtLastBackgroundBuild>2024-04-07T16:40:53.3929255Z</QtLastBackgroundBuild>
<QtTouchProperty>
</QtTouchProperty>
</PropertyGroup>
<PropertyGroup Label="QtSettings" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<QtLastBackgroundBuild>2023-05-12T00:20:07.0175398Z</QtLastBackgroundBuild>
<QtTouchProperty>
</QtTouchProperty>
</PropertyGroup>
</Project>