#include "LapWeldPresenter.h" #include "VrError.h" #include "VrLog.h" #include #include #include #include #include #include #include #include #include #include #include #include "Version.h" #include "VrTimeUtils.h" #include "VrDateUtils.h" #include "SX_lapWeldDetection_Export.h" #include "SG_baseDataType.h" #include "VrConvert.h" #include "PointCloudImageUtils.h" LapWeldPresenter::LapWeldPresenter() : m_pConfigManager(nullptr) , m_pStatus(nullptr) , m_pDetectPresenter(nullptr) , m_pTCPServer(nullptr) , m_bCameraConnected(false) , m_bTCPConnected(false) , m_currentWorkStatus(WorkStatus::Error) { m_detectionDataCache.clear(); } LapWeldPresenter::~LapWeldPresenter() { // 停止算法检测线程 m_bAlgoDetectThreadRunning = false; m_algoDetectCondition.notify_all(); // 等待算法检测线程结束 if (m_algoDetectThread.joinable()) { m_algoDetectThread.join(); } // 释放缓存的检测数据 _ClearDetectionDataCache(); // 释放TCP服务器 if (m_pTCPServer) { m_pTCPServer->Deinitialize(); delete m_pTCPServer; m_pTCPServer = nullptr; } // 释放相机设备资源 for(auto it = m_vrEyeDeviceList.begin(); it != m_vrEyeDeviceList.end(); it++) { if (it->second != nullptr) { it->second->CloseDevice(); delete it->second; it->second = nullptr; } } m_vrEyeDeviceList.clear(); // 释放检测处理器 if(m_pDetectPresenter) { delete m_pDetectPresenter; m_pDetectPresenter = nullptr; } // 释放配置管理器 if (m_pConfigManager) { m_pConfigManager->Shutdown(); delete m_pConfigManager; m_pConfigManager = nullptr; } } int LapWeldPresenter::Init() { LOG_DEBUG("Start APP Version: %s\n", LAPWELD_FULL_VERSION_STRING); // 初始化连接状态 m_bCameraConnected = false; m_currentWorkStatus = WorkStatus::InitIng; m_pStatus->OnWorkStatusChanged(m_currentWorkStatus); m_pDetectPresenter = new DetectPresenter(); // 创建 ConfigManager 实例 m_pConfigManager = new ConfigManager(); if (!m_pConfigManager) { LOG_ERROR("Failed to create ConfigManager instance\n"); m_pStatus->OnStatusUpdate("配置管理器创建失败"); return ERR_CODE(DEV_CONFIG_ERR); } // 初始化 ConfigManager if (!m_pConfigManager->Initialize()) { LOG_ERROR("Failed to initialize ConfigManager\n"); m_pStatus->OnStatusUpdate("配置管理器初始化失败"); return ERR_CODE(DEV_CONFIG_ERR); } // 获取配置结果 ConfigResult configResult = m_pConfigManager->GetConfigResult(); m_projectType = configResult.projectType; int nRet = SUCCESS; // 初始化算法参数 nRet = InitAlgorithmParams(); if (nRet != 0) { m_pStatus->OnStatusUpdate("算法参数初始化失败"); LOG_ERROR("Algorithm parameters initialization failed with error: %d\n", nRet); } else { m_pStatus->OnStatusUpdate("算法参数初始化成功"); LOG_INFO("Algorithm parameters initialization successful\n"); } InitCamera(configResult.cameraList); LOG_INFO("Camera initialization completed. 
Connected cameras: %zu, default camera index: %d\n", m_vrEyeDeviceList.size(), m_currentCameraIndex); // 初始化TCP服务器 nRet = InitTCPServer(); if (nRet != 0) { m_pStatus->OnStatusUpdate("TCP服务器初始化失败"); m_bTCPConnected = false; } else { m_pStatus->OnStatusUpdate("TCP服务器初始化成功"); } m_bAlgoDetectThreadRunning = true; m_algoDetectThread = std::thread(&LapWeldPresenter::_AlgoDetectThread, this); m_algoDetectThread.detach(); m_pStatus->OnStatusUpdate("设备初始化完成"); CheckAndUpdateWorkStatus(); QString str = QString("%1 配置初始化成功").arg(ProjectTypeToString(configResult.projectType).c_str()); m_pStatus->OnStatusUpdate(str.toStdString()); LOG_INFO("Configuration initialized successfully\n"); return SUCCESS; } // TCP服务器初始化 int LapWeldPresenter::InitTCPServer() { LOG_DEBUG("Initializing TCP server\n"); // 创建TCP服务器协议实例 if (m_pTCPServer == nullptr) { m_pTCPServer = new TCPServerProtocol(); } // 初始化TCP服务器(使用默认端口5020) int result = m_pTCPServer->Initialize(5020); if (result != 0) { LOG_ERROR("Failed to initialize TCP server: %d\n", result); return result; } // 设置连接状态回调 m_pTCPServer->SetConnectionCallback([this](bool connected) { this->OnTCPConnectionChanged(connected); }); // 设置检测触发回调 m_pTCPServer->SetDetectionTriggerCallback([this](bool startWork, int cameraIndex, qint64 timestamp) { return this->OnTCPDetectionTrigger(startWork, cameraIndex, timestamp); }); LOG_INFO("TCP server initialized successfully on port 5020\n"); m_bTCPConnected = true; return SUCCESS; } // 相机协议相关方法 int LapWeldPresenter::InitCamera(std::vector& cameraList) { // 通知UI相机个数 int cameraCount = cameraList.size(); // 初始化相机列表,预分配空间 m_vrEyeDeviceList.resize(cameraCount, std::make_pair("", nullptr)); for(int i = 0; i < cameraCount; i++) { m_vrEyeDeviceList[i] = std::make_pair(cameraList[i].name, nullptr); } m_pStatus->OnCameraCountChanged(cameraCount); if(cameraCount > 0){ if (cameraCount >= 1) { // 尝试打开相机 int nRet = _OpenDevice(1, cameraList[0].name.c_str(), cameraList[0].ip.c_str(), m_projectType); m_pStatus->OnCamera1StatusChanged(nRet == SUCCESS); m_bCameraConnected = (nRet == SUCCESS); } if (cameraCount >= 2) { // 尝试打开相机 int nRet = _OpenDevice(2, cameraList[1].name.c_str(), cameraList[1].ip.c_str(), m_projectType); m_pStatus->OnCamera2StatusChanged(nRet == SUCCESS); m_bCameraConnected = (nRet == SUCCESS); } } else { m_vrEyeDeviceList.resize(1, std::make_pair("", nullptr)); _OpenDevice(1, "相机", nullptr, m_projectType); } // 设置默认相机索引为第一个连接的相机 m_currentCameraIndex = 1; // 默认从1开始 for (int i = 0; i < static_cast(m_vrEyeDeviceList.size()); i++) { if (m_vrEyeDeviceList[i].second != nullptr) { m_currentCameraIndex = i + 1; // 找到第一个连接的相机 break; } } return SUCCESS; } // 初始化算法参数 int LapWeldPresenter::InitAlgorithmParams() { LOG_DEBUG("Start initializing algorithm parameters\n"); QString exePath = QCoreApplication::applicationFilePath(); // 清空现有的手眼标定矩阵列表 m_clibMatrixList.clear(); // 获取手眼标定文件路径并确保文件存在 QString clibPath = PathManager::GetInstance().GetCalibrationFilePath(); LOG_INFO("Loading hand-eye matrices from: %s\n", clibPath.toStdString().c_str()); // 读取存在的矩阵数量 int nExistMatrixNum = CVrConvert::GetClibMatrixCount(clibPath.toStdString().c_str()); LOG_INFO("Found %d hand-eye calibration matrices\n", nExistMatrixNum); // 循环加载每个矩阵 for(int matrixIndex = 0; matrixIndex < nExistMatrixNum; matrixIndex++) { // 构造矩阵标识符 char matrixIdent[64]; #ifdef _WIN32 sprintf_s(matrixIdent, "CalibMatrixInfo_%d", matrixIndex); #else sprintf(matrixIdent, "CalibMatrixInfo_%d", matrixIndex); #endif // 创建新的标定矩阵结构 CalibMatrix calibMatrix; // 初始化为单位矩阵 double initClibMatrix[16] = { 1.0, 
            1.0, 0.0, 0.0, 0.0,   // row 1
            0.0, 1.0, 0.0, 0.0,   // row 2
            0.0, 0.0, 1.0, 0.0,   // row 3
            0.0, 0.0, 0.0, 1.0    // row 4
        };

        // Load the matrix data
        bool loadSuccess = CVrConvert::LoadClibMatrix(clibPath.toStdString().c_str(), matrixIdent,
                                                      "dCalibMatrix", calibMatrix.clibMatrix);
        if (loadSuccess) {
            m_clibMatrixList.push_back(calibMatrix);
            LOG_INFO("Successfully loaded matrix %d\n", matrixIndex);

            // Dump the matrix contents
            QString clibMatrixStr;
            LOG_INFO("Matrix %d content:\n", matrixIndex);
            for (int i = 0; i < 4; ++i) {
                clibMatrixStr.clear();
                for (int j = 0; j < 4; ++j) {
                    clibMatrixStr += QString::asprintf("%8.4f ", calibMatrix.clibMatrix[i * 4 + j]);
                }
                LOG_INFO("  %s\n", clibMatrixStr.toStdString().c_str());
            }
        } else {
            LOG_WARNING("Failed to load matrix %d, using identity matrix\n", matrixIndex);
            // Fall back to the identity matrix when loading fails
            memcpy(calibMatrix.clibMatrix, initClibMatrix, sizeof(initClibMatrix));
            m_clibMatrixList.push_back(calibMatrix);
        }
    }
    LOG_INFO("Total loaded %zu hand-eye calibration matrices\n", m_clibMatrixList.size());

    // Fetch the configuration result from the ConfigManager
    ConfigResult configResult = m_pConfigManager->GetConfigResult();
    const VrAlgorithmParams& xmlParams = configResult.algorithmParams;
    const VrDebugParam& debugParam = configResult.debugParam;

    LOG_INFO("Loaded XML params - LapWeld: lapHeight=%.1f, weldMinLen=%.1f, weldRefPoints=%d\n",
             xmlParams.lapWeldParam.lapHeight, xmlParams.lapWeldParam.weldMinLen, xmlParams.lapWeldParam.weldRefPoints);
    LOG_INFO("Loaded XML params - Filter: continuityTh=%.1f, outlierTh=%d\n",
             xmlParams.filterParam.continuityTh, xmlParams.filterParam.outlierTh);
    LOG_INFO("projectType: %s\n", ProjectTypeToString(m_projectType).c_str());
    LOG_INFO("Algorithm parameters initialized successfully:\n");
    LOG_INFO("  LapWeld: lapHeight=%.1f, weldMinLen=%.1f, weldRefPoints=%d\n",
             xmlParams.lapWeldParam.lapHeight, xmlParams.lapWeldParam.weldMinLen, xmlParams.lapWeldParam.weldRefPoints);

    // Print the leveling (plane calibration) parameters of every camera
    LOG_INFO("Loading plane calibration parameters for all cameras:\n");
    for (const auto& cameraParam : xmlParams.planeCalibParam.cameraCalibParams) {
        LOG_INFO("Camera %d (%s) calibration parameters:\n", cameraParam.cameraIndex, cameraParam.cameraName.c_str());
        LOG_INFO("  Is calibrated: %s\n", cameraParam.isCalibrated ? "YES" : "NO");
        LOG_INFO("  Plane height: %.3f\n", cameraParam.planeHeight);
        LOG_INFO("  Plane calibration matrix:\n");
        LOG_INFO("    [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[0], cameraParam.planeCalib[1], cameraParam.planeCalib[2]);
        LOG_INFO("    [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[3], cameraParam.planeCalib[4], cameraParam.planeCalib[5]);
        LOG_INFO("    [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[6], cameraParam.planeCalib[7], cameraParam.planeCalib[8]);
        LOG_INFO("  Inverse rotation matrix:\n");
        LOG_INFO("    [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[0], cameraParam.invRMatrix[1], cameraParam.invRMatrix[2]);
        LOG_INFO("    [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[3], cameraParam.invRMatrix[4], cameraParam.invRMatrix[5]);
        LOG_INFO("    [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[6], cameraParam.invRMatrix[7], cameraParam.invRMatrix[8]);
        LOG_INFO("  --------------------------------\n");
    }

    return SUCCESS;
}

// Hand-eye calibration matrix accessor
const CalibMatrix LapWeldPresenter::GetClibMatrix(int index) const
{
    CalibMatrix clibMatrix;
    double initClibMatrix[16] = {
        1.0, 0.0, 0.0, 0.0,   // row 1
        0.0, 1.0, 0.0, 0.0,   // row 2
        0.0, 0.0, 1.0, 0.0,   // row 3
        0.0, 0.0, 0.0, 1.0    // row 4
    };
    memcpy(clibMatrix.clibMatrix, initClibMatrix, sizeof(initClibMatrix));

    if (index >= 0 && index < static_cast<int>(m_clibMatrixList.size())) {
        clibMatrix = m_clibMatrixList[index];
        memcpy(clibMatrix.clibMatrix, m_clibMatrixList[index].clibMatrix, sizeof(initClibMatrix));
    } else {
        LOG_WARNING("Invalid hand-eye calibration matrix\n");
    }
    return clibMatrix;
}

void LapWeldPresenter::SetStatusCallback(IYLapWeldStatus* status)
{
    m_pStatus = status;
}

// TCP server callback implementations
void LapWeldPresenter::OnTCPConnectionChanged(bool connected)
{
    LOG_INFO("TCP connection status changed: %s\n", (connected ? "connected" : "disconnected"));

    // Update the TCP connection state
    m_bTCPConnected = connected;

    if (m_pStatus) {
        // Report the detailed page status
        if (connected) {
            m_pStatus->OnStatusUpdate("TCP客户端连接成功,通信正常");
        } else {
            m_pStatus->OnStatusUpdate("TCP客户端连接断开,等待重新连接");
        }
    }

    // Re-evaluate the overall work status
    CheckAndUpdateWorkStatus();
}

bool LapWeldPresenter::OnTCPDetectionTrigger(bool startWork, int cameraIndex, qint64 timestamp)
{
"start work" : "stop work"), cameraIndex, timestamp); if (startWork) { // 启动检测 int result = StartDetection(cameraIndex, false); // 手动触发模式 if (result == SUCCESS) { LOG_INFO("Detection started successfully via TCP trigger\n"); return true; } else { LOG_ERROR("Failed to start detection via TCP trigger, error: %d\n", result); return false; } } else { // 停止检测 int result = StopDetection(); if (result == SUCCESS) { LOG_INFO("Detection stopped successfully via TCP trigger\n"); return true; } else { LOG_ERROR("Failed to stop detection via TCP trigger, error: %d\n", result); return false; } } } // 模拟检测函数,用于演示 int LapWeldPresenter::StartDetection(int cameraIdx, bool isAuto) { LOG_INFO("--------------------------------\n"); LOG_INFO("Start detection with camera index: %d\n", cameraIdx); // 检查设备状态是否准备就绪 if (isAuto && m_currentWorkStatus != WorkStatus::Ready) { LOG_INFO("Device not ready, cannot start detection\n"); if (m_pStatus) { m_pStatus->OnStatusUpdate("设备未准备就绪,无法开始检测"); } return ERR_CODE(DEV_BUSY); } // 保存当前使用的相机ID(从1开始编号) if(-1 != cameraIdx){ m_currentCameraIndex = cameraIdx; } int cameraIndex = m_currentCameraIndex; m_currentWorkStatus = WorkStatus::Working; // 通知UI工作状态变更为"正在工作" if (m_pStatus) { m_pStatus->OnWorkStatusChanged(WorkStatus::Working); } // 设置机械臂工作状态为忙碌 if(m_vrEyeDeviceList.empty()){ LOG_ERROR("No camera device found\n"); if (m_pStatus) { m_pStatus->OnStatusUpdate("未找到相机设备"); } return ERR_CODE(DEV_NOT_FIND); } // 清空检测数据缓存(释放之前的内存) _ClearDetectionDataCache(); int nRet = SUCCESS; // 根据参数决定启动哪些相机 // 启动指定相机(cameraIndex为相机ID,从1开始编号) int arrayIndex = cameraIndex - 1; // 转换为数组索引(从0开始) // 检查相机是否连接 if (arrayIndex < 0 || arrayIndex >= static_cast(m_vrEyeDeviceList.size()) || m_vrEyeDeviceList[arrayIndex].second == nullptr) { LOG_ERROR("Camera %d is not connected\n", cameraIndex); QString cameraName = (arrayIndex >= 0 && arrayIndex < static_cast(m_vrEyeDeviceList.size())) ? 
QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first) : QString("相机%1").arg(cameraIndex); m_pStatus->OnStatusUpdate(QString("%1 未连接").arg(cameraName).toStdString()); return ERR_CODE(DEV_NOT_FIND); } if (arrayIndex >= 0 && arrayIndex < static_cast(m_vrEyeDeviceList.size())) { IVrEyeDevice* pDevice = m_vrEyeDeviceList[arrayIndex].second; EVzResultDataType eDataType = keResultDataType_Position; if(m_projectType == ProjectType::DirectBag){ eDataType = keResultDataType_PointXYZRGBA; } pDevice->SetStatusCallback(&LapWeldPresenter::_StaticCameraNotify, this); // 开始 nRet = pDevice->StartDetect(&LapWeldPresenter::_StaticDetectionCallback, eDataType, this); LOG_INFO("Camera ID %d start detection nRet: %d\n", cameraIndex, nRet); if (nRet == SUCCESS) { QString cameraName = QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first); m_pStatus->OnStatusUpdate(QString("启动%1检测成功").arg(cameraName).toStdString()); } else { LOG_ERROR("Camera ID %d start detection failed with error: %d\n", cameraIndex, nRet); QString cameraName = QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first); m_pStatus->OnStatusUpdate(QString("启动%1检测失败[%d]").arg(cameraName).arg(nRet).toStdString()); } } else { LOG_ERROR("Invalid camera ID: %d, valid range is 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size()); m_pStatus->OnStatusUpdate(QString("无效的相机ID: %1,有效范围: 1-%2").arg(cameraIndex).arg(m_vrEyeDeviceList.size()).toStdString()); nRet = ERR_CODE(DEV_NOT_FIND); } return nRet; } int LapWeldPresenter::StopDetection() { LOG_INFO("Stop detection\n"); // 停止所有相机的检测 for (size_t i = 0; i < m_vrEyeDeviceList.size(); ++i) { IVrEyeDevice* pDevice = m_vrEyeDeviceList[i].second; if (pDevice) { int ret = pDevice->StopDetect(); if (ret == 0) { LOG_INFO("Camera %zu stop detection successfully\n", i + 1); } else { LOG_WARNING("Camera %zu stop detection failed, error code: %d\n", i + 1, ret); } } } // 通知UI工作状态变更为"就绪"(如果设备连接正常) if (m_pStatus) { // 检查设备连接状态,决定停止后的状态 if (m_bCameraConnected) { m_currentWorkStatus = WorkStatus::Ready; m_pStatus->OnWorkStatusChanged(WorkStatus::Ready); } else { m_currentWorkStatus = WorkStatus::Error; m_pStatus->OnWorkStatusChanged(WorkStatus::Error); } m_pStatus->OnStatusUpdate("检测已停止"); } // 设置机械臂工作状态为空闲 return 0; } // 加载调试数据进行检测 int LapWeldPresenter::LoadDebugDataAndDetect(const std::string& filePath) { LOG_INFO("Loading debug data from file: %s\n", filePath.c_str()); m_currentWorkStatus = WorkStatus::Working; if (m_pStatus) { m_pStatus->OnWorkStatusChanged(WorkStatus::Working); std::string fileName = QFileInfo(QString::fromStdString(filePath)).fileName().toStdString(); m_pStatus->OnStatusUpdate(QString("加载文件:%1").arg(fileName.c_str()).toStdString()); } // 设置机械臂工作状态为忙碌 int lineNum = 0; float scanSpeed = 0.0f; int maxTimeStamp = 0; int clockPerSecond = 0; int result = SUCCESS; // 1. 
    // 1. Clear the existing detection data cache
    _ClearDetectionDataCache();

    {
        std::lock_guard<std::mutex> lock(m_detectionDataMutex);
        // Use the unified LoadLaserScanData interface; the file format is detected automatically
        result = m_dataLoader.LoadLaserScanData(filePath, m_detectionDataCache, lineNum, scanSpeed,
                                                maxTimeStamp, clockPerSecond);
    }

    if (result != SUCCESS) {
        LOG_ERROR("Failed to load debug data: %s\n", m_dataLoader.GetLastError().c_str());
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("调试数据加载失败: " + m_dataLoader.GetLastError());
        }
        return result;
    }

    LOG_INFO("Successfully loaded %d lines of debug data\n", lineNum);
    if (m_pStatus) {
        m_pStatus->OnStatusUpdate(QString("成功加载 %1 行调试数据").arg(lineNum).toStdString());
    }

    // Run the detection task and wait for it to finish
    result = _DetectTask();
    return result;
}

// Set the status callback for all cameras
void LapWeldPresenter::SetCameraStatusCallback(VzNL_OnNotifyStatusCBEx fNotify, void* param)
{
    for (size_t i = 0; i < m_vrEyeDeviceList.size(); i++) {
        IVrEyeDevice* pDevice = m_vrEyeDeviceList[i].second;
        if (pDevice) {
            pDevice->SetStatusCallback(fNotify, param);
            LOG_DEBUG("Status callback set for camera %zu\n", i + 1);
        }
    }
}

// Open a camera device
int LapWeldPresenter::_OpenDevice(int cameraIndex, const char* cameraName, const char* cameraIp, ProjectType& projectType)
{
    IVrEyeDevice* pDevice = nullptr;
    IVrEyeDevice::CreateObject(&pDevice);

    int nRet = pDevice->InitDevice();
    ERR_CODE_RETURN(nRet);

    // Set the status callback first
    nRet = pDevice->SetStatusCallback(&LapWeldPresenter::_StaticCameraNotify, this);
    LOG_DEBUG("SetStatusCallback result: %d\n", nRet);
    ERR_CODE_RETURN(nRet);

    // Try to open the camera
    nRet = pDevice->OpenDevice(cameraIp, ProjectType::DirectBag == projectType);

    // Update the camera connection state based on the result
    bool cameraConnected = (SUCCESS == nRet);
    if (!cameraConnected) {
        delete pDevice;  // release the device that failed to open
        pDevice = nullptr;
    }

    int arrIdx = cameraIndex - 1;
    if (static_cast<int>(m_vrEyeDeviceList.size()) > arrIdx) {
        m_vrEyeDeviceList[arrIdx] = std::make_pair(cameraName, pDevice);  // store in the slot for this camera
    }

    m_pStatus->OnCamera1StatusChanged(cameraConnected);
    m_pStatus->OnStatusUpdate(cameraConnected ? "相机连接成功" : "相机连接失败");
    m_bCameraConnected = cameraConnected;

    return nRet;
}

// Check whether detection can start on the given camera
bool LapWeldPresenter::_SinglePreDetection(int cameraIndex)
{
    if (m_vrEyeDeviceList.empty()) {
        LOG_ERROR("No camera device found\n");
        if (nullptr != m_pStatus) {
            m_pStatus->OnStatusUpdate("未找到相机设备");
        }
        return false;
    }
    if (cameraIndex < 1 || cameraIndex > static_cast<int>(m_vrEyeDeviceList.size())) {
        LOG_ERROR("Invalid camera index: %d, valid range: 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
        return false;
    }
    if (m_vrEyeDeviceList[cameraIndex - 1].second == nullptr) {
        LOG_ERROR("Camera %d is not connected\n", cameraIndex);
        return false;
    }
    return true;
}

int LapWeldPresenter::_SingleDetection(int cameraIndex, bool isStart)
{
    int nRet = SUCCESS;
    if (isStart) {
        QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size()))
                                 ? QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first)
                                 : QString("相机%1").arg(cameraIndex);
        QString message = QString("收到信号,启动%1检测").arg(cameraName);
        if (nullptr != m_pStatus) {
            m_pStatus->OnStatusUpdate(message.toStdString());
        }
        nRet = StartDetection(cameraIndex);
    } else {
        QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size()))
                                 ? QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first)
                                 : QString("相机%1").arg(cameraIndex);
        QString message = QString("收到信号,停止%1检测").arg(cameraName);
        if (nullptr != m_pStatus) {
            m_pStatus->OnStatusUpdate(message.toStdString());
        }
        nRet = StopDetection();
    }
    return nRet;
}

// Static callback: forwards camera status notifications to the instance
void LapWeldPresenter::_StaticCameraNotify(EVzDeviceWorkStatus eStatus, void* pExtData, unsigned int nDataLength, void* pInfoParam)
{
    // pInfoParam carries the this pointer; convert it back to LapWeldPresenter*
    LapWeldPresenter* pThis = reinterpret_cast<LapWeldPresenter*>(pInfoParam);
    if (pThis) {
        // Call the non-static member function on the instance
        pThis->_CameraNotify(eStatus, pExtData, nDataLength, pInfoParam);
    }
}

void LapWeldPresenter::_CameraNotify(EVzDeviceWorkStatus eStatus, void* pExtData, unsigned int nDataLength, void* pInfoParam)
{
    LOG_DEBUG("[Camera Notify] received: status=%d\n", (int)eStatus);

    switch (eStatus) {
    case EVzDeviceWorkStatus::keDeviceWorkStatus_Offline:
    {
        LOG_WARNING("[Camera Notify] Camera device offline/disconnected\n");

        // Update the camera connection state
        m_bCameraConnected = false;

        // Notify the UI of the camera state change
        if (m_pStatus) {
            // Only camera 1's state is updated for now; in practice pInfoParam (or another
            // mechanism) would be needed to tell which camera actually went offline.
            m_pStatus->OnCamera1StatusChanged(false);
            m_pStatus->OnStatusUpdate("相机设备离线");
        }

        // Re-evaluate the overall work status
        CheckAndUpdateWorkStatus();
        break;
    }
    case EVzDeviceWorkStatus::keDeviceWorkStatus_Eye_Reconnect:
    {
        LOG_INFO("[Camera Notify] Camera device online/connected\n");

        // Update the camera connection state
        m_bCameraConnected = true;

        // Notify the UI of the camera state change
        if (m_pStatus) {
            m_pStatus->OnCamera1StatusChanged(true);
            m_pStatus->OnStatusUpdate("相机设备已连接");
        }

        // Re-evaluate the overall work status
        CheckAndUpdateWorkStatus();
        break;
    }
    case EVzDeviceWorkStatus::keDeviceWorkStatus_Device_Swing_Finish:
    {
        LOG_INFO("[Camera Notify] Received scan finish signal from camera\n");

        // Send a page hint
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("相机扫描完成,开始数据处理...");
        }

        // Wake the detection thread so it starts processing
        m_algoDetectCondition.notify_one();
        break;
    }
    default:
        break;
    }
}

// Static detection data callback
void LapWeldPresenter::_StaticDetectionCallback(EVzResultDataType eDataType, SVzLaserLineData* pLaserLinePoint, void* pUserData)
{
    LapWeldPresenter* pThis = reinterpret_cast<LapWeldPresenter*>(pUserData);
    if (pThis) {
        pThis->_DetectionCallback(eDataType, pLaserLinePoint, pUserData);
    }
}

// Detection data callback (instance version)
void LapWeldPresenter::_DetectionCallback(EVzResultDataType eDataType, SVzLaserLineData* pLaserLinePoint, void* pUserData)
{
    if (!pLaserLinePoint) {
        LOG_WARNING("[Detection Callback] pLaserLinePoint is null\n");
        return;
    }
    if (pLaserLinePoint->nPointCount <= 0) {
        LOG_WARNING("[Detection Callback] Point count is zero or negative: %d\n", pLaserLinePoint->nPointCount);
        return;
    }
    if (!pLaserLinePoint->p3DPoint) {
        LOG_WARNING("[Detection Callback] p3DPoint is null\n");
        return;
    }

    // Store a deep copy of the SVzLaserLineData in the unified cache
    SVzLaserLineData lineData;
    memset(&lineData, 0, sizeof(SVzLaserLineData));

    // Allocate and copy the point cloud data according to the data type
    if (eDataType == keResultDataType_Position) {
        // Copy SVzNL3DPosition data
        if (pLaserLinePoint->p3DPoint && pLaserLinePoint->nPointCount > 0) {
            lineData.p3DPoint = new SVzNL3DPosition[pLaserLinePoint->nPointCount];
            if (lineData.p3DPoint) {
                memcpy(lineData.p3DPoint, pLaserLinePoint->p3DPoint,
                       sizeof(SVzNL3DPosition) * pLaserLinePoint->nPointCount);
            }
            lineData.p2DPoint = new SVzNL2DPosition[pLaserLinePoint->nPointCount];
            if (lineData.p2DPoint) {
                memcpy(lineData.p2DPoint, pLaserLinePoint->p2DPoint,
                       sizeof(SVzNL2DPosition) * pLaserLinePoint->nPointCount);
            }
        }
    } else if (eDataType == keResultDataType_PointXYZRGBA) {
        // Copy SVzNLPointXYZRGBA data
        if (pLaserLinePoint->p3DPoint && pLaserLinePoint->nPointCount > 0) {
            lineData.p3DPoint = new SVzNLPointXYZRGBA[pLaserLinePoint->nPointCount];
            if (lineData.p3DPoint) {
                memcpy(lineData.p3DPoint, pLaserLinePoint->p3DPoint,
                       sizeof(SVzNLPointXYZRGBA) * pLaserLinePoint->nPointCount);
            }
            lineData.p2DPoint = new SVzNL2DLRPoint[pLaserLinePoint->nPointCount];
            if (lineData.p2DPoint) {
                memcpy(lineData.p2DPoint, pLaserLinePoint->p2DPoint,
                       sizeof(SVzNL2DLRPoint) * pLaserLinePoint->nPointCount);
            }
        }
    }

    lineData.nPointCount = pLaserLinePoint->nPointCount;
    lineData.llTimeStamp = pLaserLinePoint->llTimeStamp;
    lineData.llFrameIdx = pLaserLinePoint->llFrameIdx;
    lineData.nEncodeNo = pLaserLinePoint->nEncodeNo;
    lineData.fSwingAngle = pLaserLinePoint->fSwingAngle;
    lineData.bEndOnceScan = pLaserLinePoint->bEndOnceScan;

    std::lock_guard<std::mutex> lock(m_detectionDataMutex);
    m_detectionDataCache.push_back(std::make_pair(eDataType, lineData));
}

void LapWeldPresenter::CheckAndUpdateWorkStatus()
{
    if (m_bCameraConnected) {
        m_currentWorkStatus = WorkStatus::Ready;
        m_pStatus->OnWorkStatusChanged(WorkStatus::Ready);
    } else {
        m_currentWorkStatus = WorkStatus::Error;
        m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
    }
}

void LapWeldPresenter::_AlgoDetectThread()
{
    while (m_bAlgoDetectThreadRunning) {
        std::unique_lock<std::mutex> lock(m_algoDetectMutex);
        // Wake up when a scan is ready for processing, or when the thread is asked to stop
        // (otherwise the destructor's notify_all() would never release this wait).
        m_algoDetectCondition.wait(lock, [this]() {
            return !m_bAlgoDetectThreadRunning || m_currentWorkStatus == WorkStatus::Working;
        });
        if (!m_bAlgoDetectThreadRunning) {
            break;
        }

        // Run the detection task
        int nRet = _DetectTask();
        LOG_INFO("DetectTask result: %d\n", nRet);
        if (nRet != SUCCESS) {
            m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
        }

        LOG_DEBUG("Algo Thread end\n");
        m_currentWorkStatus = WorkStatus::Ready;
    }
}

int LapWeldPresenter::_DetectTask()
{
    LOG_INFO("[Algo Thread] Start real detection task using algorithm\n");

    std::lock_guard<std::mutex> lock(m_detectionDataMutex);

    // 1. Check the cached point cloud data
    if (m_detectionDataCache.empty()) {
        LOG_WARNING("No cached detection data available\n");
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("无缓存的检测数据");
        }
        return ERR_CODE(DEV_DATA_INVALID);
    }

    // 2. Prepare the algorithm input data
    unsigned int lineNum = static_cast<unsigned int>(m_detectionDataCache.size());
    if (m_pStatus) {
        m_pStatus->OnStatusUpdate("扫描线数:" + std::to_string(lineNum) + ",正在算法检测...");
    }

    CVrTimeUtils oTimeUtils;

    // Get the hand-eye calibration matrix of the camera currently in use
    const CalibMatrix currentClibMatrix = GetClibMatrix(m_currentCameraIndex - 1);

    // Fetch the algorithm and debug parameters from the ConfigManager
    VrAlgorithmParams algorithmParams = m_pConfigManager->GetAlgorithmParams();
    ConfigResult configResult = m_pConfigManager->GetConfigResult();
    VrDebugParam debugParam = configResult.debugParam;

    DetectionResult detectionResult;
    int nRet = m_pDetectPresenter->DetectLapWeld(m_currentCameraIndex, m_detectionDataCache, algorithmParams,
                                                 debugParam, m_dataLoader, currentClibMatrix.clibMatrix,
                                                 detectionResult);

    // Report success or failure according to the return code
    if (m_pStatus) {
        QString err = QString("错误:%1").arg(nRet);
        m_pStatus->OnStatusUpdate(QString("检测%1").arg(SUCCESS == nRet ? "成功" : err).toStdString());
    }
    ERR_CODE_RETURN(nRet);

    LOG_INFO("[Algo Thread] sx_getLapWeldPostion detected %zu objects time : %.2f ms\n",
             detectionResult.positions.size(), oTimeUtils.GetElapsedTimeInMilliSec());

    // 8. Return the detection result
    detectionResult.cameraIndex = m_currentCameraIndex;

    // Invoke the detection result callback
    m_pStatus->OnDetectionResult(detectionResult);

    // Update the status message
    QString statusMsg = QString("检测完成,发现%1个目标").arg(detectionResult.positions.size());
    m_pStatus->OnStatusUpdate(statusMsg.toStdString());

    // Forward the detection result to the TCP client
    _SendDetectionResultToTCP(detectionResult, m_currentCameraIndex);

    // 9. After detection finishes, set the work status to "Completed"
    if (m_pStatus) {
        m_currentWorkStatus = WorkStatus::Completed;
        m_pStatus->OnWorkStatusChanged(WorkStatus::Completed);
    }

    // Return to the ready state
    m_currentWorkStatus = WorkStatus::Ready;
    return SUCCESS;
}

// Release the cached detection data
void LapWeldPresenter::_ClearDetectionDataCache()
{
    std::lock_guard<std::mutex> lock(m_detectionDataMutex);
    // Free the loaded data
    m_dataLoader.FreeLaserScanData(m_detectionDataCache);
    LOG_DEBUG("Detection data cache cleared successfully\n");
}

// Send the detection result to the TCP client
void LapWeldPresenter::_SendDetectionResultToTCP(const DetectionResult& detectionResult, int cameraIndex)
{
    LOG_INFO("Sending detection result for camera %d\n", cameraIndex);

    // Check whether any objects were detected
    if (detectionResult.positions.empty()) {
        LOG_INFO("No objects detected\n");
        if (m_pStatus) {
            QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size()))
                                     ? QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first)
                                     : QString("相机%1").arg(cameraIndex);
            m_pStatus->OnStatusUpdate(QString("检测完成,%1未发现目标").arg(cameraName).toStdString());
        }
    } else {
        LOG_INFO("Detected %zu objects for camera %d\n", detectionResult.positions.size(), cameraIndex);
        if (m_pStatus) {
            QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size()))
                                     ? QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first)
                                     : QString("相机%1").arg(cameraIndex);
            m_pStatus->OnStatusUpdate(QString("检测完成,%1发现%2个目标").arg(cameraName).arg(detectionResult.positions.size()).toStdString());
        }
    }
}

// Implementation of the configuration change notification interface
void LapWeldPresenter::OnConfigChanged(const ConfigResult& configResult)
{
    LOG_INFO("Configuration changed notification received, reloading algorithm parameters\n");

    // Re-initialize the algorithm parameters
    int result = InitAlgorithmParams();
    if (result == SUCCESS) {
        LOG_INFO("Algorithm parameters reloaded successfully after config change\n");
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("配置已更新,算法参数重新加载成功");
        }
    } else {
        LOG_ERROR("Failed to reload algorithm parameters after config change, error: %d\n", result);
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("配置更新后算法参数重新加载失败");
        }
    }
}

// Get the leveling (plane calibration) parameters for a camera index
SSG_planeCalibPara LapWeldPresenter::_GetCameraCalibParam(int cameraIndex)
{
    // Look up the leveling parameters for the given camera index
    SSG_planeCalibPara calibParam;

    // Default to the identity matrix (uncalibrated state)
    double identityMatrix[9] = {1.0, 0.0, 0.0,
                                0.0, 1.0, 0.0,
                                0.0, 0.0, 1.0};
    for (int i = 0; i < 9; i++) {
        calibParam.planeCalib[i] = identityMatrix[i];
        calibParam.invRMatrix[i] = identityMatrix[i];
    }
    calibParam.planeHeight = -1.0;  // use the default height

    // Fetch the algorithm parameters from the ConfigManager
    VrAlgorithmParams algorithmParams = m_pConfigManager->GetAlgorithmParams();
    for (const auto& cameraParam : algorithmParams.planeCalibParam.cameraCalibParams) {
        if (cameraParam.cameraIndex == cameraIndex) {
            // Use the actual calibration matrices only if the camera has been calibrated;
            // otherwise keep the identity fallback
            if (cameraParam.isCalibrated) {
                for (int i = 0; i < 9; i++) {
                    calibParam.planeCalib[i] = cameraParam.planeCalib[i];
                    calibParam.invRMatrix[i] = cameraParam.invRMatrix[i];
                }
                calibParam.planeHeight = cameraParam.planeHeight;
            }
        }
    }
    return calibParam;
}

// Set the default camera index
void LapWeldPresenter::SetDefaultCameraIndex(int cameraIndex)
{
    LOG_INFO("Setting default camera index from %d to %d\n", m_currentCameraIndex, cameraIndex);

    // Validate the camera index (cameraIndex is the 1-based index from the configuration)
    if (cameraIndex < 1 || cameraIndex > static_cast<int>(m_vrEyeDeviceList.size())) {
        LOG_WARNING("Invalid camera index: %d, valid range: 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate(QString("无效的相机索引: %1,有效范围: 1-%2").arg(cameraIndex).arg(m_vrEyeDeviceList.size()).toStdString());
        }
        return;
    }

    // Update the default camera index
    m_currentCameraIndex = cameraIndex;
LOG_INFO("Default camera index updated to %d\n", m_currentCameraIndex); if (m_pStatus) { QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast(m_vrEyeDeviceList.size())) ? QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex); m_pStatus->OnStatusUpdate(QString("设置%1为默认相机").arg(cameraName).toStdString()); } } // 保存检测数据到文件(默认实现) int LapWeldPresenter::SaveDetectionDataToFile(const std::string& filePath) { LOG_INFO("Saving detection data to file: %s\n", filePath.c_str()); if (m_detectionDataCache.empty()) { LOG_WARNING("No detection data available for saving\n"); return ERR_CODE(DEV_DATA_INVALID); } // 保存数据到文件 int lineNum = static_cast(m_detectionDataCache.size()); float scanSpeed = 0.0f; int maxTimeStamp = 0; int clockPerSecond = 0; int result = m_dataLoader.SaveLaserScanData(filePath, m_detectionDataCache, lineNum, scanSpeed, maxTimeStamp, clockPerSecond); if (result == SUCCESS) { LOG_INFO("Successfully saved %d lines of detection data to file: %s\n", lineNum, filePath.c_str()); } else { LOG_ERROR("Failed to save detection data, error: %s\n", m_dataLoader.GetLastError().c_str()); } return result; } // ============ 实现 ICameraLevelCalculator 接口 ============ bool LapWeldPresenter::CalculatePlaneCalibration( const std::vector>& scanData, double planeCalib[9], double& planeHeight, double invRMatrix[9]) { try { // 检查是否有足够的扫描数据 if (scanData.empty()) { LOG_ERROR("No scan data available for plane calibration\n"); return false; } LOG_INFO("Calculating plane calibration from %zu scan lines\n", scanData.size()); // 转换为算法需要的XYZ格式 LaserDataLoader dataLoader; std::vector> xyzData; int convertResult = dataLoader.ConvertToSVzNL3DPosition(scanData, xyzData); if (convertResult != SUCCESS || xyzData.empty()) { LOG_WARNING("Failed to convert data to XYZ format or no XYZ data available\n"); return false; } // 调用焊接项目的调平算法 SSG_planeCalibPara calibResult = sx_getBaseCalibPara(xyzData); // 将结构体中的数据复制到输出参数 for (int i = 0; i < 9; i++) { planeCalib[i] = calibResult.planeCalib[i]; invRMatrix[i] = calibResult.invRMatrix[i]; } planeHeight = calibResult.planeHeight; LOG_INFO("Plane calibration calculated successfully: height=%.3f\n", planeHeight); return true; } catch (const std::exception& e) { LOG_ERROR("Exception in CalculatePlaneCalibration: %s\n", e.what()); return false; } catch (...) 
{ LOG_ERROR("Unknown exception in CalculatePlaneCalibration\n"); return false; } } // ============ 实现 ICameraLevelResultSaver 接口 ============ bool LapWeldPresenter::SaveLevelingResults(double planeCalib[9], double planeHeight, double invRMatrix[9], int cameraIndex, const QString& cameraName) { try { if (!m_pConfigManager) { LOG_ERROR("ConfigManager is null, cannot save leveling results\n"); return false; } // 验证传入的相机参数 if (cameraIndex <= 0) { LOG_ERROR("Invalid camera index: %d\n", cameraIndex); return false; } if (cameraName.isEmpty()) { LOG_ERROR("Camera name is empty\n"); return false; } // 获取当前配置 QString configPath = PathManager::GetInstance().GetConfigFilePath(); LOG_INFO("Config path: %s\n", configPath.toUtf8().constData()); SystemConfig systemConfig = m_pConfigManager->GetConfig(); // 创建或更新指定相机的调平参数 VrCameraPlaneCalibParam cameraParam; cameraParam.cameraIndex = cameraIndex; cameraParam.cameraName = cameraName.toStdString(); cameraParam.planeHeight = planeHeight; cameraParam.isCalibrated = true; // 复制校准矩阵 for (int i = 0; i < 9; i++) { cameraParam.planeCalib[i] = planeCalib[i]; cameraParam.invRMatrix[i] = invRMatrix[i]; } // 更新配置中的相机校准参数 systemConfig.configResult.algorithmParams.planeCalibParam.SetCameraCalibParam(cameraParam); // 更新并保存配置 if (!m_pConfigManager->UpdateFullConfig(systemConfig)) { LOG_ERROR("Failed to update config with leveling results\n"); return false; } if (!m_pConfigManager->SaveConfigToFile(configPath.toStdString())) { LOG_ERROR("Failed to save config file with leveling results\n"); return false; } LOG_INFO("Leveling results saved successfully for camera %d (%s)\n", cameraIndex, cameraName.toUtf8().constData()); LOG_INFO("Plane height: %.3f\n", planeHeight); LOG_INFO("Calibration marked as completed\n"); return true; } catch (const std::exception& e) { LOG_ERROR("Exception in SaveLevelingResults: %s\n", e.what()); return false; } } bool LapWeldPresenter::LoadLevelingResults(int cameraIndex, const QString& cameraName, double planeCalib[9], double& planeHeight, double invRMatrix[9]) { try { if (!m_pConfigManager) { LOG_ERROR("ConfigManager is null, cannot load calibration data\n"); return false; } // 从ConfigManager获取配置结果 ConfigResult configResult = m_pConfigManager->GetConfigResult(); // 获取指定相机的标定参数 VrCameraPlaneCalibParam cameraParamValue; if (!configResult.algorithmParams.planeCalibParam.GetCameraCalibParam(cameraIndex, cameraParamValue) || !cameraParamValue.isCalibrated) { LOG_INFO("No calibration data found for camera %d (%s)\n", cameraIndex, cameraName.toUtf8().constData()); return false; } // 复制标定数据 for (int i = 0; i < 9; i++) { planeCalib[i] = cameraParamValue.planeCalib[i]; invRMatrix[i] = cameraParamValue.invRMatrix[i]; } planeHeight = cameraParamValue.planeHeight; LOG_INFO("Calibration data loaded successfully for camera %d (%s)\n", cameraIndex, cameraName.toUtf8().constData()); LOG_INFO("Plane height: %.3f\n", planeHeight); return true; } catch (const std::exception& e) { LOG_ERROR("Exception in LoadLevelingResults: %s\n", e.what()); return false; } }