GrabBag/App/Workpiece/WorkpieceApp/Presenter/Src/WorkpiecePresenter.cpp

#include "WorkpiecePresenter.h"
#include "VrError.h"
#include "VrLog.h"
#include <QtCore/QCoreApplication>
#include <QtCore/QFileInfo>
#include <QtCore/QDir>
#include <QtCore/QString>
#include <QtCore/QStandardPaths>
#include <QtCore/QFile>
#include <QtCore/QDateTime>
#include <cmath>
#include <algorithm>
#include <QImage>
#include <QThread>
#include <atomic>
#include <QJsonObject>
#include <QJsonArray>
#include "Version.h"
#include "VrTimeUtils.h"
#include "VrDateUtils.h"
#include "SG_baseDataType.h"
#include "VrConvert.h"
#include "TCPServerProtocol.h"
#include "DetectPresenter.h"
#include "PathManager.h"
WorkpiecePresenter::WorkpiecePresenter(QObject *parent)
: QObject(parent)
, m_vrConfig(nullptr)
, m_pStatus(nullptr)
, m_pDetectPresenter(nullptr)
, m_pTCPServer(nullptr)
, m_bCameraConnected(false)
, m_bTCPConnected(false)
, m_currentWorkStatus(WorkStatus::Error)
{
m_detectionDataCache.clear();
}
WorkpiecePresenter::~WorkpiecePresenter()
{
// Shut down the configuration manager
if (m_pConfigManager) {
m_pConfigManager->Shutdown();
m_pConfigManager.reset();
}
// Stop the algorithm detection thread
m_bAlgoDetectThreadRunning = false;
m_algoDetectCondition.notify_all();
// Wait for the algorithm detection thread to finish
if (m_algoDetectThread.joinable()) {
m_algoDetectThread.join();
}
// Release the cached detection data
_ClearDetectionDataCache();
// Release the TCP server
if (m_pTCPServer) {
m_pTCPServer->Deinitialize();
delete m_pTCPServer;
m_pTCPServer = nullptr;
}
// Release the camera device resources
for(auto it = m_vrEyeDeviceList.begin(); it != m_vrEyeDeviceList.end(); it++)
{
if (it->second != nullptr) {
it->second->CloseDevice();
delete it->second;
it->second = nullptr;
}
}
m_vrEyeDeviceList.clear();
// Release the detection presenter
if(m_pDetectPresenter)
{
delete m_pDetectPresenter;
m_pDetectPresenter = nullptr;
}
// Release the configuration object
if (m_vrConfig) {
delete m_vrConfig;
m_vrConfig = nullptr;
}
}
int WorkpiecePresenter::Init()
{
LOG_DEBUG("Start APP Version: %s\n", WORKPIECE_FULL_VERSION_STRING);
// Initialize the connection state
m_bCameraConnected = false;
m_currentWorkStatus = WorkStatus::InitIng;
m_pStatus->OnWorkStatusChanged(m_currentWorkStatus);
m_pDetectPresenter = new DetectPresenter();
// Get the configuration file path
QString configPath = PathManager::GetConfigFilePath();
int nRet = SUCCESS;
// Initialize the ConfigManager (it creates the IVrConfig instance internally)
m_pConfigManager = std::make_unique<ConfigManager>();
if (!m_pConfigManager->Initialize(configPath.toStdString())) {
LOG_ERROR("Failed to initialize ConfigManager\n");
m_pStatus->OnStatusUpdate("配置管理器初始化失败");
return ERR_CODE(DEV_OPEN_ERR);
}
LOG_INFO("ConfigManager initialized successfully\n");
// Fetch the configuration result from ConfigManager
ConfigResult configResult = m_pConfigManager->GetConfigResult();
m_projectType = configResult.projectType;
// Get the IVrConfig instance used for config-change notifications
m_vrConfig = m_pConfigManager->GetVrConfigInstance();
// Register the config-change notification callback
if (m_vrConfig) {
m_vrConfig->SetConfigChangeNotify(this);
}
// Initialize the algorithm parameters
nRet = InitAlgorithmParams();
if (nRet != 0) {
m_pStatus->OnStatusUpdate("算法参数初始化失败");
LOG_ERROR("Algorithm parameters initialization failed with error: %d\n", nRet);
} else {
m_pStatus->OnStatusUpdate("算法参数初始化成功");
LOG_INFO("Algorithm parameters initialization successful\n");
}
InitCamera(configResult.cameraList);
LOG_INFO("Camera initialization completed. Connected cameras: %zu, default camera index: %d\n",
m_vrEyeDeviceList.size(), m_currentCameraIndex);
// Initialize the TCP server
nRet = InitTCPServer();
if (nRet != 0) {
m_pStatus->OnStatusUpdate("TCP服务器初始化失败");
m_bTCPConnected = false;
} else {
m_pStatus->OnStatusUpdate("TCP服务器初始化成功");
}
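// Start the algorithm worker thread. It sleeps on m_algoDetectCondition and is
// woken either by the camera's scan-finish notification (see _CameraNotify) or
// by the destructor during shutdown; the thread is joined in the destructor,
// so it is intentionally not detached.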
m_bAlgoDetectThreadRunning = true;
m_algoDetectThread = std::thread(&WorkpiecePresenter::_AlgoDetectThread, this);
m_pStatus->OnStatusUpdate("设备初始化完成");
CheckAndUpdateWorkStatus();
QString str = QString("%1 配置初始化成功").arg(ProjectTypeToString(configResult.projectType).c_str());
m_pStatus->OnStatusUpdate(str.toStdString());
return SUCCESS;
}
// Camera protocol related methods
int WorkpiecePresenter::InitCamera(std::vector<DeviceInfo>& cameraList)
{
// Report the number of cameras to the UI
int cameraCount = cameraList.size();
// Initialize the camera list and pre-allocate its slots
m_vrEyeDeviceList.resize(cameraCount, std::make_pair("", nullptr));
for(int i = 0; i < cameraCount; i++)
{
m_vrEyeDeviceList[i] = std::make_pair(cameraList[i].name, nullptr);
}
m_pStatus->OnCameraCountChanged(cameraCount);
if(cameraCount > 0){
if (cameraCount >= 1) {
// Try to open camera 1
int nRet = _OpenDevice(1, cameraList[0].name.c_str(), cameraList[0].ip.c_str(), m_projectType);
m_pStatus->OnCamera1StatusChanged(nRet == SUCCESS);
m_bCameraConnected = (nRet == SUCCESS);
}
if (cameraCount >= 2) {
// Try to open camera 2
int nRet = _OpenDevice(2, cameraList[1].name.c_str(), cameraList[1].ip.c_str(), m_projectType);
m_pStatus->OnCamera2StatusChanged(nRet == SUCCESS);
// Consider cameras connected if at least one camera opened successfully
m_bCameraConnected = m_bCameraConnected || (nRet == SUCCESS);
}
} else {
m_vrEyeDeviceList.resize(1, std::make_pair("", nullptr));
_OpenDevice(1, "相机", nullptr, m_projectType);
}
// Default the camera index to the first connected camera
m_currentCameraIndex = 1; // camera IDs start at 1
for (int i = 0; i < static_cast<int>(m_vrEyeDeviceList.size()); i++) {
if (m_vrEyeDeviceList[i].second != nullptr) {
m_currentCameraIndex = i + 1; // first connected camera found
break;
}
}
return SUCCESS;
}
// Initialize the algorithm parameters
int WorkpiecePresenter::InitAlgorithmParams()
{
LOG_DEBUG("Start initializing algorithm parameters\n");
QString exePath = QCoreApplication::applicationFilePath();
// Clear the existing hand-eye calibration matrix list
m_clibMatrixList.clear();
// Get the hand-eye calibration file path and make sure the file exists
QString clibPath = PathManager::GetCalibrationFilePath();
LOG_INFO("Loading hand-eye matrices from: %s\n", clibPath.toStdString().c_str());
// Read the number of stored matrices
int nExistMatrixNum = CVrConvert::GetClibMatrixCount(clibPath.toStdString().c_str());
LOG_INFO("Found %d hand-eye calibration matrices\n", nExistMatrixNum);
// Load each matrix in turn
for(int matrixIndex = 0; matrixIndex < nExistMatrixNum; matrixIndex++)
{
// Build the matrix identifier
char matrixIdent[64];
#ifdef _WIN32
sprintf_s(matrixIdent, "CalibMatrixInfo_%d", matrixIndex);
#else
sprintf(matrixIdent, "CalibMatrixInfo_%d", matrixIndex);
#endif
// Create a new calibration matrix structure
CalibMatrix calibMatrix;
// Initialize it to the identity matrix
double initClibMatrix[16] = {
1.0, 0.0, 0.0, 0.0, // row 1
0.0, 1.0, 0.0, 0.0, // row 2
0.0, 0.0, 1.0, 0.0, // row 3
0.0, 0.0, 0.0, 1.0 // row 4
};
// Load the matrix data
bool loadSuccess = CVrConvert::LoadClibMatrix(clibPath.toStdString().c_str(), matrixIdent, "dCalibMatrix", calibMatrix.clibMatrix);
if(loadSuccess)
{
m_clibMatrixList.push_back(calibMatrix);
LOG_INFO("Successfully loaded matrix %d\n", matrixIndex);
// Print the matrix contents
QString clibMatrixStr;
LOG_INFO("Matrix %d content:\n", matrixIndex);
for (int i = 0; i < 4; ++i) {
clibMatrixStr.clear();
for (int j = 0; j < 4; ++j) {
clibMatrixStr += QString::asprintf("%8.4f ", calibMatrix.clibMatrix[i * 4 + j]);
}
LOG_INFO(" %s\n", clibMatrixStr.toStdString().c_str());
}
}
else
{
LOG_WARNING("Failed to load matrix %d, using identity matrix\n", matrixIndex);
// If loading failed, fall back to the identity matrix
memcpy(calibMatrix.clibMatrix, initClibMatrix, sizeof(initClibMatrix));
m_clibMatrixList.push_back(calibMatrix);
}
}
LOG_INFO("Total loaded %zu hand-eye calibration matrices\n", m_clibMatrixList.size());
// Get the already-loaded configuration from ConfigManager (avoids reloading)
ConfigResult configResult = m_pConfigManager->GetConfigResult();
// If the ConfigManager result is empty and m_vrConfig is available, try loading the config directly
if (configResult.cameraList.empty() && m_vrConfig) {
LOG_WARNING("ConfigResult from ConfigManager is empty, trying direct load\n");
QString configPath = PathManager::GetConfigFilePath();
LOG_INFO("Loading config: %s\n", configPath.toStdString().c_str());
int ret = m_vrConfig->LoadConfig(configPath.toStdString(), configResult);
if (ret != LOAD_CONFIG_SUCCESS) {
LOG_ERROR("Failed to load config file directly, error code: %d\n", ret);
}
}
const VrAlgorithmParams& xmlParams = configResult.algorithmParams;
// Save the debug parameters
m_debugParam = configResult.debugParam;
LOG_INFO("Loaded XML params - Workpiece: lineLen=%.1f\n",
xmlParams.workpieceParam.lineLen);
LOG_INFO("Loaded XML params - Filter: continuityTh=%.1f, outlierTh=%.1f\n",
xmlParams.filterParam.continuityTh, xmlParams.filterParam.outlierTh);
// Use the configuration structure directly
m_algorithmParams = xmlParams;
LOG_INFO("projectType: %s\n", ProjectTypeToString(m_projectType).c_str());
LOG_INFO("Algorithm parameters initialized successfully:\n");
LOG_INFO(" Workpiece: lineLen=%.1f\n",
m_algorithmParams.workpieceParam.lineLen);
// Print the leveling (plane calibration) parameters for all cameras (with safety checks)
LOG_INFO("Loading plane calibration parameters for all cameras:\n");
if (!m_algorithmParams.planeCalibParam.cameraCalibParams.empty()) {
for (const auto& cameraParam : m_algorithmParams.planeCalibParam.cameraCalibParams) {
try {
LOG_INFO("Camera %d (%s) calibration parameters:\n",
cameraParam.cameraIndex, cameraParam.cameraName.c_str());
LOG_INFO(" Is calibrated: %s\n", cameraParam.isCalibrated ? "YES" : "NO");
LOG_INFO(" Plane height: %.3f\n", cameraParam.planeHeight);
LOG_INFO(" Plane calibration matrix:\n");
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[0], cameraParam.planeCalib[1], cameraParam.planeCalib[2]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[3], cameraParam.planeCalib[4], cameraParam.planeCalib[5]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[6], cameraParam.planeCalib[7], cameraParam.planeCalib[8]);
LOG_INFO(" Inverse rotation matrix:\n");
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[0], cameraParam.invRMatrix[1], cameraParam.invRMatrix[2]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[3], cameraParam.invRMatrix[4], cameraParam.invRMatrix[5]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[6], cameraParam.invRMatrix[7], cameraParam.invRMatrix[8]);
LOG_INFO(" --------------------------------\n");
} catch (const std::exception& e) {
LOG_ERROR("Exception while printing camera calibration parameters: %s\n", e.what());
} catch (...) {
LOG_ERROR("Unknown exception while printing camera calibration parameters\n");
}
}
} else {
LOG_WARNING("No plane calibration parameters found in configuration\n");
}
return SUCCESS;
}
// Hand-eye calibration matrix management
CalibMatrix WorkpiecePresenter::GetClibMatrix(int index) const
{
CalibMatrix clibMatrix;
double initClibMatrix[16] = {
1.0, 0.0, 0.0, 0.0, // row 1
0.0, 1.0, 0.0, 0.0, // row 2
0.0, 0.0, 1.0, 0.0, // row 3
0.0, 0.0, 0.0, 1.0 // row 4
};
memcpy(clibMatrix.clibMatrix, initClibMatrix, sizeof(initClibMatrix));
if (index >= 0 && index < static_cast<int>(m_clibMatrixList.size())) {
clibMatrix = m_clibMatrixList[index];
} else {
LOG_WARNING("Invalid hand-eye calibration matrix\n");
}
return clibMatrix;
}
void WorkpiecePresenter::SetStatusCallback(IYWorkpieceStatus* status)
{
m_pStatus = status;
}
// Simulated detection function, used for demonstration
int WorkpiecePresenter::StartDetection(int cameraIdx, bool isAuto)
{
LOG_INFO("--------------------------------\n");
LOG_INFO("Start detection with camera index: %d\n", cameraIdx);
// Check whether the device is ready
if (isAuto && m_currentWorkStatus != WorkStatus::Ready) {
LOG_INFO("Device not ready, cannot start detection\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("设备未准备就绪,无法开始检测");
}
return ERR_CODE(DEV_BUSY);
}
// Save the camera ID currently in use (IDs are numbered from 1)
if(-1 != cameraIdx){
m_currentCameraIndex = cameraIdx;
}
int cameraIndex = m_currentCameraIndex;
m_currentWorkStatus = WorkStatus::Working;
// Notify the UI that the work status changed to "Working"
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(WorkStatus::Working);
}
// Set the robot arm work status to busy
if(m_vrEyeDeviceList.empty()){
LOG_ERROR("No camera device found\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("未找到相机设备");
}
return ERR_CODE(DEV_NOT_FIND);
}
// Clear the detection data cache (frees previously held memory)
_ClearDetectionDataCache();
int nRet = SUCCESS;
// Decide which camera to start based on the parameter
// Start the specified camera; cameraIndex is the camera ID, numbered from 1
int arrayIndex = cameraIndex - 1; // convert to a 0-based array index
// Check whether the camera is connected
if (arrayIndex < 0 || arrayIndex >= static_cast<int>(m_vrEyeDeviceList.size()) || m_vrEyeDeviceList[arrayIndex].second == nullptr) {
LOG_ERROR("Camera %d is not connected\n", cameraIndex);
QString cameraName = (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first) : QString("相机%1").arg(cameraIndex);
m_pStatus->OnStatusUpdate(QString("%1 未连接").arg(cameraName).toStdString());
return ERR_CODE(DEV_NOT_FIND);
}
if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* pDevice = m_vrEyeDeviceList[arrayIndex].second;
EVzResultDataType eDataType = keResultDataType_Position;
if(m_projectType == ProjectType::DirectBag){
eDataType = keResultDataType_PointXYZRGBA;
}
pDevice->SetStatusCallback(&WorkpiecePresenter::_StaticCameraNotify, this);
// Start detection
nRet = pDevice->StartDetect(&WorkpiecePresenter::_StaticDetectionCallback, eDataType, this);
LOG_INFO("Camera ID %d start detection nRet: %d\n", cameraIndex, nRet);
if (nRet == SUCCESS) {
QString cameraName = QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first);
m_pStatus->OnStatusUpdate(QString("启动%1检测成功").arg(cameraName).toStdString());
} else {
LOG_ERROR("Camera ID %d start detection failed with error: %d\n", cameraIndex, nRet);
QString cameraName = QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first);
m_pStatus->OnStatusUpdate(QString("启动%1检测失败[%d]").arg(cameraName).arg(nRet).toStdString());
}
} else {
LOG_ERROR("Invalid camera ID: %d, valid range is 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
m_pStatus->OnStatusUpdate(QString("无效的相机ID: %1有效范围: 1-%2").arg(cameraIndex).arg(m_vrEyeDeviceList.size()).toStdString());
nRet = ERR_CODE(DEV_NOT_FIND);
}
return nRet;
}
int WorkpiecePresenter::StopDetection()
{
LOG_INFO("Stop detection\n");
// Stop detection on all cameras
for (size_t i = 0; i < m_vrEyeDeviceList.size(); ++i) {
IVrEyeDevice* pDevice = m_vrEyeDeviceList[i].second;
if (pDevice) {
int ret = pDevice->StopDetect();
if (ret == 0) {
LOG_INFO("Camera %zu stop detection successfully\n", i + 1);
} else {
LOG_WARNING("Camera %zu stop detection failed, error code: %d\n", i + 1, ret);
}
}
}
// Notify the UI that the work status changed to "Ready" (if the devices are still connected)
if (m_pStatus) {
// Check the device connection state to decide the post-stop status
if (m_bCameraConnected) {
m_currentWorkStatus = WorkStatus::Ready;
m_pStatus->OnWorkStatusChanged(WorkStatus::Ready);
} else {
m_currentWorkStatus = WorkStatus::Error;
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
m_pStatus->OnStatusUpdate("检测已停止");
}
// Set the robot arm work status to idle
return 0;
}
// Load debug data and run detection on it
int WorkpiecePresenter::LoadDebugDataAndDetect(const std::string& filePath)
{
LOG_INFO("Loading debug data from file: %s\n", filePath.c_str());
m_currentWorkStatus = WorkStatus::Working;
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(WorkStatus::Working);
std::string fileName = QFileInfo(QString::fromStdString(filePath)).fileName().toStdString();
m_pStatus->OnStatusUpdate(QString("加载文件:%1").arg(fileName.c_str()).toStdString());
}
int lineNum = 0;
float scanSpeed = 0.0f;
int maxTimeStamp = 0;
int clockPerSecond = 0;
int result = SUCCESS;
// 1. Clear the existing detection data cache
_ClearDetectionDataCache();
{
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
// Use the unified LoadLaserScanData interface, which detects the file format automatically
result = m_dataLoader.LoadLaserScanData(filePath, m_detectionDataCache, lineNum, scanSpeed, maxTimeStamp, clockPerSecond);
}
if (result != SUCCESS) {
LOG_ERROR("Failed to load debug data: %s\n", m_dataLoader.GetLastError().c_str());
if (m_pStatus) {
m_pStatus->OnStatusUpdate("调试数据加载失败: " + m_dataLoader.GetLastError());
}
return result;
}
LOG_INFO("Successfully loaded %d lines of debug data\n", lineNum);
if (m_pStatus) {
m_pStatus->OnStatusUpdate(QString("成功加载 %1 行调试数据").arg(lineNum).toStdString());
}
// Run the detection task and wait for it to complete
result = _DetectTask();
return result;
}
// Set the status callback for all cameras
void WorkpiecePresenter::SetCameraStatusCallback(VzNL_OnNotifyStatusCBEx fNotify, void* param)
{
for (size_t i = 0; i < m_vrEyeDeviceList.size(); i++) {
IVrEyeDevice* pDevice = m_vrEyeDeviceList[i].second;
if (pDevice) {
pDevice->SetStatusCallback(fNotify, param);
LOG_DEBUG("Status callback set for camera %zu\n", i + 1);
}
}
}
// Open a camera device
int WorkpiecePresenter::_OpenDevice(int cameraIndex, const char* cameraName, const char* cameraIp, ProjectType& projectType)
{
IVrEyeDevice* pDevice = nullptr;
IVrEyeDevice::CreateObject(&pDevice);
int nRet = pDevice->InitDevice();
ERR_CODE_RETURN(nRet);
// Set the status callback first
nRet = pDevice->SetStatusCallback(&WorkpiecePresenter::_StaticCameraNotify, this);
LOG_DEBUG("SetStatusCallback result: %d\n", nRet);
ERR_CODE_RETURN(nRet);
// Try to open the camera
nRet = pDevice->OpenDevice(cameraIp, ProjectType::DirectBag == projectType);
// Record whether the camera opened successfully
bool cameraConnected = (SUCCESS == nRet);
if(!cameraConnected){
delete pDevice; // release the device that failed to open
pDevice = nullptr;
}
int arrIdx = cameraIndex - 1;
if(static_cast<int>(m_vrEyeDeviceList.size()) > arrIdx){
m_vrEyeDeviceList[arrIdx] = std::make_pair(cameraName, pDevice); // store in this camera's slot
}
m_pStatus->OnCamera1StatusChanged(cameraConnected);
m_pStatus->OnStatusUpdate(cameraConnected ? "相机连接成功" : "相机连接失败");
m_bCameraConnected = cameraConnected;
return nRet;
}
// Check whether detection can be started
bool WorkpiecePresenter::_SinglePreDetection(int cameraIndex)
{
if(m_vrEyeDeviceList.empty()){
LOG_ERROR("No camera device found\n");
if (nullptr != m_pStatus) {
m_pStatus->OnStatusUpdate("未找到相机设备");
}
return false;
}
if(cameraIndex < 1 || cameraIndex > static_cast<int>(m_vrEyeDeviceList.size())){
LOG_ERROR("Invalid camera index: %d, valid range: 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
return false;
}
if(m_vrEyeDeviceList[cameraIndex - 1].second == nullptr){
LOG_ERROR("Camera %d is not connected\n", cameraIndex);
return false;
}
return true;
}
int WorkpiecePresenter::_SingleDetection(int cameraIndex, bool isStart)
{
int nRet = SUCCESS;
if (isStart) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
QString message = QString("收到信号,启动%1检测").arg(cameraName);
if (nullptr != m_pStatus) {
m_pStatus->OnStatusUpdate(message.toStdString());
}
nRet = StartDetection(cameraIndex);
} else {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
QString message = QString("收到信号,停止%1检测").arg(cameraName);
if (nullptr != m_pStatus) {
m_pStatus->OnStatusUpdate(message.toStdString());
}
nRet = StopDetection();
}
return nRet;
}
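// The camera SDK expects plain C-style callbacks, so the _Static* functions
// below act as trampolines: `this` is passed as the user parameter when the
// callback is registered, and the static function casts it back and forwards
// the call to the corresponding member function.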
// Static callback implementations
void WorkpiecePresenter::_StaticCameraNotify(EVzDeviceWorkStatus eStatus, void* pExtData, unsigned int nDataLength, void* pInfoParam)
{
// Recover the this pointer from pInfoParam and cast it back to WorkpiecePresenter*
WorkpiecePresenter* pThis = reinterpret_cast<WorkpiecePresenter*>(pInfoParam);
if (pThis)
{
// Forward to the non-static member function
pThis->_CameraNotify(eStatus, pExtData, nDataLength, pInfoParam);
}
}
void WorkpiecePresenter::_CameraNotify(EVzDeviceWorkStatus eStatus, void *pExtData, unsigned int nDataLength, void *pInfoParam)
{
LOG_DEBUG("[Camera Notify] received: status=%d\n", (int)eStatus);
switch (eStatus) {
case EVzDeviceWorkStatus::keDeviceWorkStatus_Offline:
{
LOG_WARNING("[Camera Notify] Camera device offline/disconnected\n");
// Update the camera connection state
m_bCameraConnected = false;
// Notify the UI that the camera status changed
if (m_pStatus) {
// Which camera went offline is not determined here; for now only camera 1's status is updated.
// A real deployment may need to distinguish the camera, e.g. via pInfoParam.
m_pStatus->OnCamera1StatusChanged(false);
m_pStatus->OnStatusUpdate("相机设备离线");
}
// Re-check and update the work status
CheckAndUpdateWorkStatus();
break;
}
case EVzDeviceWorkStatus::keDeviceWorkStatus_Eye_Reconnect:
{
LOG_INFO("[Camera Notify] Camera device online/connected\n");
// Update the camera connection state
m_bCameraConnected = true;
// Notify the UI that the camera status changed
if (m_pStatus) {
m_pStatus->OnCamera1StatusChanged(true);
m_pStatus->OnStatusUpdate("相机设备已连接");
}
// Re-check and update the work status
CheckAndUpdateWorkStatus();
break;
}
case EVzDeviceWorkStatus::keDeviceWorkStatus_Device_Swing_Finish:
{
LOG_INFO("[Camera Notify] Received scan finish signal from camera\n");
// Send a status hint to the UI
if (m_pStatus) {
m_pStatus->OnStatusUpdate("相机扫描完成,开始数据处理...");
}
// Wake the detection thread to start processing
m_algoDetectCondition.notify_one();
break;
}
default:
break;
}
}
// Detection data callback (static version)
void WorkpiecePresenter::_StaticDetectionCallback(EVzResultDataType eDataType, SVzLaserLineData* pLaserLinePoint, void* pUserData)
{
WorkpiecePresenter* pThis = reinterpret_cast<WorkpiecePresenter*>(pUserData);
if (pThis) {
pThis->_DetectionCallback(eDataType, pLaserLinePoint, pUserData);
}
}
// Detection data callback (instance version)
void WorkpiecePresenter::_DetectionCallback(EVzResultDataType eDataType, SVzLaserLineData* pLaserLinePoint, void* pUserData)
{
if (!pLaserLinePoint) {
LOG_WARNING("[Detection Callback] pLaserLinePoint is null\n");
return;
}
if (pLaserLinePoint->nPointCount <= 0) {
LOG_WARNING("[Detection Callback] Point count is zero or negative: %d\n", pLaserLinePoint->nPointCount);
return;
}
if (!pLaserLinePoint->p3DPoint) {
LOG_WARNING("[Detection Callback] p3DPoint is null\n");
return;
}
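// Deep-copy the incoming line: the buffers behind pLaserLinePoint are assumed
// to be owned by the camera SDK and valid only for the duration of this
// callback, so the points are copied into heap buffers that the cache (and
// later FreeLaserScanData) owns.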
// Store a copy of the SVzLaserLineData in the unified cache
SVzLaserLineData lineData;
memset(&lineData, 0, sizeof(SVzLaserLineData));
// Allocate and copy the point cloud data according to the data type
if (eDataType == keResultDataType_Position) {
// Copy SVzNL3DPosition data
if (pLaserLinePoint->p3DPoint && pLaserLinePoint->nPointCount > 0) {
lineData.p3DPoint = new SVzNL3DPosition[pLaserLinePoint->nPointCount];
if (lineData.p3DPoint) {
memcpy(lineData.p3DPoint, pLaserLinePoint->p3DPoint, sizeof(SVzNL3DPosition) * pLaserLinePoint->nPointCount);
}
lineData.p2DPoint = new SVzNL2DPosition[pLaserLinePoint->nPointCount];
if (lineData.p2DPoint && pLaserLinePoint->p2DPoint) {
memcpy(lineData.p2DPoint, pLaserLinePoint->p2DPoint, sizeof(SVzNL2DPosition) * pLaserLinePoint->nPointCount);
}
}
} else if (eDataType == keResultDataType_PointXYZRGBA) {
// Copy SVzNLPointXYZRGBA data
if (pLaserLinePoint->p3DPoint && pLaserLinePoint->nPointCount > 0) {
lineData.p3DPoint = new SVzNLPointXYZRGBA[pLaserLinePoint->nPointCount];
if (lineData.p3DPoint) {
memcpy(lineData.p3DPoint, pLaserLinePoint->p3DPoint, sizeof(SVzNLPointXYZRGBA) * pLaserLinePoint->nPointCount);
}
lineData.p2DPoint = new SVzNL2DLRPoint[pLaserLinePoint->nPointCount];
if (lineData.p2DPoint && pLaserLinePoint->p2DPoint) {
memcpy(lineData.p2DPoint, pLaserLinePoint->p2DPoint, sizeof(SVzNL2DLRPoint) * pLaserLinePoint->nPointCount);
}
}
}
lineData.nPointCount = pLaserLinePoint->nPointCount;
lineData.llTimeStamp = pLaserLinePoint->llTimeStamp;
lineData.llFrameIdx = pLaserLinePoint->llFrameIdx;
lineData.nEncodeNo = pLaserLinePoint->nEncodeNo;
lineData.fSwingAngle = pLaserLinePoint->fSwingAngle;
lineData.bEndOnceScan = pLaserLinePoint->bEndOnceScan;
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
m_detectionDataCache.push_back(std::make_pair(eDataType, lineData));
}
void WorkpiecePresenter::CheckAndUpdateWorkStatus()
{
if (m_bCameraConnected) {
m_currentWorkStatus = WorkStatus::Ready;
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(WorkStatus::Ready);
}
} else {
m_currentWorkStatus = WorkStatus::Error;
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
}
}
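// Worker loop: waits on m_algoDetectCondition until the presenter enters the
// Working state (set by StartDetection and signalled from _CameraNotify when a
// scan finishes) or until shutdown is requested, then runs _DetectTask on the
// cached line data.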
void WorkpiecePresenter::_AlgoDetectThread()
{
while(m_bAlgoDetectThreadRunning)
{
std::unique_lock<std::mutex> lock(m_algoDetectMutex);
// Wake up when a detection run is requested or when shutdown is requested
m_algoDetectCondition.wait(lock, [this]() {
return m_currentWorkStatus == WorkStatus::Working || !m_bAlgoDetectThreadRunning;
});
if(!m_bAlgoDetectThreadRunning){
break;
}
// Run the detection task on the cached data
int nRet = _DetectTask();
LOG_INFO("DetectTask result: %d\n", nRet);
if(nRet != SUCCESS){
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
LOG_DEBUG("Algo Thread end\n");
m_currentWorkStatus = WorkStatus::Ready;
}
}
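// _DetectTask: takes the cached laser-line data, runs the workpiece detection
// algorithm via m_pDetectPresenter with the current hand-eye calibration
// matrix, reports the result to the UI through the status callback, pushes it
// to the TCP client, and finally returns the presenter to the Ready state.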
int WorkpiecePresenter::_DetectTask()
{
LOG_INFO("[Algo Thread] Start real detection task using algorithm\n");
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
// 1. Get the cached point cloud data
if (m_detectionDataCache.empty()) {
LOG_WARNING("No cached detection data available\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("无缓存的检测数据");
}
return ERR_CODE(DEV_DATA_INVALID);
}
// 2. Prepare the algorithm input data
unsigned int lineNum = 0;
lineNum = m_detectionDataCache.size();
if(m_pStatus){
m_pStatus->OnStatusUpdate("扫描线数:" + std::to_string(lineNum) + ",正在算法检测...");
}
CVrTimeUtils oTimeUtils;
// Get the hand-eye calibration matrix currently in use
const CalibMatrix currentClibMatrix = GetClibMatrix(m_currentCameraIndex - 1);
DetectionResult detectionResult;
int nRet = m_pDetectPresenter->DetectWorkpiece(m_currentCameraIndex, m_detectionDataCache,
m_algorithmParams, m_debugParam, m_dataLoader,
currentClibMatrix.clibMatrix, detectionResult);
// Choose the handling according to the project type
if (m_pStatus) {
QString err = QString("错误:%1").arg(nRet);
m_pStatus->OnStatusUpdate(QString("检测%1").arg(SUCCESS == nRet ? "成功": err).toStdString());
}
LOG_INFO("[Algo Thread] sx_getWorkpiecePostion detected %zu objects time : %.2f ms\n", detectionResult.positions.size(), oTimeUtils.GetElapsedTimeInMilliSec());
ERR_CODE_RETURN(nRet);
// 8. Return the detection result
detectionResult.cameraIndex = m_currentCameraIndex;
// Invoke the detection result callback
m_pStatus->OnDetectionResult(detectionResult);
// Update the status message
QString statusMsg = QString("检测完成,发现%1个目标").arg(detectionResult.positions.size());
m_pStatus->OnStatusUpdate(statusMsg.toStdString());
// Send the detection result to the TCP client
_SendDetectionResultToTCP(detectionResult, m_currentCameraIndex);
// 9. After detection finishes, set the work status to "Completed"
if (m_pStatus) {
m_currentWorkStatus = WorkStatus::Completed;
m_pStatus->OnWorkStatusChanged(WorkStatus::Completed);
}
// Return to the Ready state
m_currentWorkStatus = WorkStatus::Ready;
return SUCCESS;
}
// Release the cached detection data
void WorkpiecePresenter::_ClearDetectionDataCache()
{
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
// Free the loaded data
m_dataLoader.FreeLaserScanData(m_detectionDataCache);
LOG_DEBUG("Detection data cache cleared successfully\n");
}
// Implementation of the config-change notification interface
void WorkpiecePresenter::OnConfigChanged(const ConfigResult& configResult)
{
LOG_INFO("Configuration changed notification received, reloading algorithm parameters\n");
// Update the debug parameters
m_debugParam = configResult.debugParam;
// Re-initialize the algorithm parameters
int result = InitAlgorithmParams();
if (result == SUCCESS) {
LOG_INFO("Algorithm parameters reloaded successfully after config change\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("配置已更新,算法参数重新加载成功");
}
} else {
LOG_ERROR("Failed to reload algorithm parameters after config change, error: %d\n", result);
if (m_pStatus) {
m_pStatus->OnStatusUpdate("配置更新后算法参数重新加载失败");
}
}
}
// Get the leveling (plane calibration) parameters for the given camera index
SSG_planeCalibPara WorkpiecePresenter::_GetCameraCalibParam(int cameraIndex)
{
// Look up the leveling parameters for the specified camera index
SSG_planeCalibPara calibParam;
// Use the identity matrix (uncalibrated state)
double identityMatrix[9] = {1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0};
for (int i = 0; i < 9; i++) {
calibParam.planeCalib[i] = identityMatrix[i];
calibParam.invRMatrix[i] = identityMatrix[i];
}
calibParam.planeHeight = -1.0; // use the default height
for (const auto& cameraParam : m_algorithmParams.planeCalibParam.cameraCalibParams) {
if (cameraParam.cameraIndex == cameraIndex) {
// Use the calibration matrix or keep the identity matrix depending on the isCalibrated flag
if (cameraParam.isCalibrated) {
// Use the actual calibration matrix
for (int i = 0; i < 9; i++) {
calibParam.planeCalib[i] = cameraParam.planeCalib[i];
calibParam.invRMatrix[i] = cameraParam.invRMatrix[i];
}
calibParam.planeHeight = cameraParam.planeHeight;
}
}
}
return calibParam;
}
// Implementation of the IConfigChangeListener interface
void WorkpiecePresenter::OnSystemConfigChanged(const SystemConfig& config)
{
LOG_INFO("System configuration changed, applying new configuration\n");
// Update the internal configuration state
if (m_vrConfig) {
// Optionally re-initialize the algorithm parameters
InitAlgorithmParams();
}
if (m_pStatus) {
m_pStatus->OnStatusUpdate("系统配置已更新");
}
}
void WorkpiecePresenter::OnCameraParamChanged(int cameraIndex, const CameraUIParam& cameraParam)
{
LOG_INFO("Camera %d parameters changed: expose=%.2f, gain=%.2f, frameRate=%.2f\n",
cameraIndex, cameraParam.exposeTime, cameraParam.gain, cameraParam.frameRate);
// Apply the camera parameters to the actual device
if (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* device = m_vrEyeDeviceList[cameraIndex - 1].second;
if (device) {
// Set the exposure time
if (cameraParam.exposeTime > 0) {
unsigned int exposeTime = static_cast<unsigned int>(cameraParam.exposeTime);
int ret = device->SetEyeExpose(exposeTime);
if (ret == SUCCESS) {
LOG_INFO("Set expose time %.2f for camera %d successfully\n", cameraParam.exposeTime, cameraIndex);
} else {
LOG_ERROR("Failed to set expose time for camera %d, error: %d\n", cameraIndex, ret);
}
}
// Set the gain
if (cameraParam.gain > 0) {
unsigned int gain = static_cast<unsigned int>(cameraParam.gain);
int ret = device->SetEyeGain(gain);
if (ret == SUCCESS) {
LOG_INFO("Set gain %.2f for camera %d successfully\n", cameraParam.gain, cameraIndex);
} else {
LOG_ERROR("Failed to set gain for camera %d, error: %d\n", cameraIndex, ret);
}
}
// Set the frame rate
if (cameraParam.frameRate > 0) {
int frameRate = static_cast<int>(cameraParam.frameRate);
int ret = device->SetFrame(frameRate);
if (ret == SUCCESS) {
LOG_INFO("Set frame rate %.2f for camera %d successfully\n", cameraParam.frameRate, cameraIndex);
} else {
LOG_ERROR("Failed to set frame rate for camera %d, error: %d\n", cameraIndex, ret);
}
}
// Set the swing speed
if (cameraParam.swingSpeed > 0) {
float swingSpeed = static_cast<float>(cameraParam.swingSpeed);
int ret = device->SetSwingSpeed(swingSpeed);
if (ret == SUCCESS) {
LOG_INFO("Set swing speed %.2f for camera %d successfully\n", cameraParam.swingSpeed, cameraIndex);
} else {
LOG_ERROR("Failed to set swing speed for camera %d, error: %d\n", cameraIndex, ret);
}
}
// Set the swing angle
if (cameraParam.swingStartAngle != cameraParam.swingStopAngle) {
float startAngle = static_cast<float>(cameraParam.swingStartAngle);
float stopAngle = static_cast<float>(cameraParam.swingStopAngle);
int ret = device->SetSwingAngle(startAngle, stopAngle);
if (ret == SUCCESS) {
LOG_INFO("Set swing angle %.2f-%.2f for camera %d successfully\n",
cameraParam.swingStartAngle, cameraParam.swingStopAngle, cameraIndex);
} else {
LOG_ERROR("Failed to set swing angle for camera %d, error: %d\n", cameraIndex, ret);
}
}
}
}
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
m_pStatus->OnStatusUpdate(QString("%1参数已更新").arg(cameraName).toStdString());
}
}
void WorkpiecePresenter::OnAlgorithmParamChanged(const VrAlgorithmParams& algorithmParams)
{
LOG_INFO("Algorithm parameters changed, updating internal configuration\n");
// 直接更新算法参数
m_algorithmParams = algorithmParams;
LOG_INFO("Updated algorithm parameters:\n");
LOG_INFO(" Workpiece: lineLen=%.1f\n",
m_algorithmParams.workpieceParam.lineLen);
LOG_INFO(" TreeGrow: yDeviation_max=%.1f, zDeviation_max=%.1f, maxLineSkipNum=%d\n",
m_algorithmParams.growParam.yDeviation_max, m_algorithmParams.growParam.zDeviation_max, m_algorithmParams.growParam.maxLineSkipNum);
LOG_INFO(" Corner: cornerTh=%.3f, jumpCornerTh_1=%.3f, jumpCornerTh_2=%.3f\n",
m_algorithmParams.cornerParam.cornerTh, m_algorithmParams.cornerParam.jumpCornerTh_1, m_algorithmParams.cornerParam.jumpCornerTh_2);
if (m_pStatus) {
m_pStatus->OnStatusUpdate("算法参数已更新");
}
}
// Set the default camera index
void WorkpiecePresenter::SetDefaultCameraIndex(int cameraIndex)
{
LOG_INFO("Setting default camera index from %d to %d\n", m_currentCameraIndex, cameraIndex);
// Validate the camera index (cameraIndex is the configuration's 1-based index)
if (cameraIndex < 1 || cameraIndex > static_cast<int>(m_vrEyeDeviceList.size())) {
LOG_WARNING("Invalid camera index: %d, valid range: 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
if (m_pStatus) {
m_pStatus->OnStatusUpdate(QString("无效的相机索引: %1有效范围: 1-%2").arg(cameraIndex).arg(m_vrEyeDeviceList.size()).toStdString());
}
return;
}
// Update the default camera index
m_currentCameraIndex = cameraIndex;
LOG_INFO("Default camera index updated to %d\n", m_currentCameraIndex);
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
m_pStatus->OnStatusUpdate(QString("设置%1为默认相机").arg(cameraName).toStdString());
}
}
// Save the detection data to a file (default implementation)
int WorkpiecePresenter::SaveDetectionDataToFile(const std::string& filePath)
{
LOG_INFO("Saving detection data to file: %s\n", filePath.c_str());
std::lock_guard<std::mutex> lock(m_detectionDataMutex); // guard the cache while saving
if (m_detectionDataCache.empty()) {
LOG_WARNING("No detection data available for saving\n");
return ERR_CODE(DEV_DATA_INVALID);
}
// Save the data to the file
int lineNum = static_cast<int>(m_detectionDataCache.size());
float scanSpeed = 0.0f;
int maxTimeStamp = 0;
int clockPerSecond = 0;
int result = m_dataLoader.SaveLaserScanData(filePath, m_detectionDataCache, lineNum, scanSpeed, maxTimeStamp, clockPerSecond);
if (result == SUCCESS) {
LOG_INFO("Successfully saved %d lines of detection data to file: %s\n", lineNum, filePath.c_str());
} else {
LOG_ERROR("Failed to save detection data, error: %s\n", m_dataLoader.GetLastError().c_str());
}
return result;
}