GrabBag/GrabBagApp/Presenter/Src/GrabBagPresenter.cpp

#include "GrabBagPresenter.h"
#include "VrError.h"
#include "VrLog.h"
#include <QtCore/QCoreApplication>
#include <QtCore/QFileInfo>
#include <QtCore/QDir>
#include <QtCore/QString>
#include <QtCore/QStandardPaths>
#include <QtCore/QFile>
#include <cmath>
#include <algorithm>
#include <QImage>
#include <QThread>
#include "Version.h"
#include "VrTimeUtils.h"
#include "VrDateUtils.h"
#include "SG_bagPositioning_Export.h"
#include "SG_baseDataType.h"
#include "VrConvert.h"
#include "PointCloudImageUtils.h"
GrabBagPresenter::GrabBagPresenter()
: m_vrConfig(nullptr)
, m_pStatus(nullptr)
, m_pDetectPresenter(nullptr)
, m_pRobotProtocol(nullptr)
, m_bCameraConnected(false)
, m_bRobotConnected(false)
, m_currentWorkStatus(WorkStatus::Error)
{
m_detectionDataCache.clear();
}
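// Destructor: shuts down the configuration manager, stops and joins the detection thread,
// then releases the protocol, camera, detection, and configuration resources.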
GrabBagPresenter::~GrabBagPresenter()
{
// Stop the configuration manager
if (m_pConfigManager) {
m_pConfigManager->Shutdown();
m_pConfigManager.reset();
}
// Stop the algorithm detection thread
m_bAlgoDetectThreadRunning = false;
m_algoDetectCondition.notify_all();
// Wait for the algorithm detection thread to finish
if (m_algoDetectThread.joinable()) {
m_algoDetectThread.join();
}
// Release cached detection data
_ClearDetectionDataCache();
// Release the robot protocol
if (m_pRobotProtocol) {
m_pRobotProtocol->Deinitialize();
delete m_pRobotProtocol;
m_pRobotProtocol = nullptr;
}
// Release the serial protocol
if (m_pSerialProtocol) {
m_pSerialProtocol->CloseSerial();
delete m_pSerialProtocol;
m_pSerialProtocol = nullptr;
}
// Release camera device resources
for(auto it = m_vrEyeDeviceList.begin(); it != m_vrEyeDeviceList.end(); it++)
{
if (it->second != nullptr) {
it->second->CloseDevice();
delete it->second;
it->second = nullptr;
}
}
m_vrEyeDeviceList.clear();
// Release the detection presenter
if(m_pDetectPresenter)
{
delete m_pDetectPresenter;
m_pDetectPresenter = nullptr;
}
// Release the configuration object
if (m_vrConfig) {
delete m_vrConfig;
m_vrConfig = nullptr;
}
}
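// Initializes all subsystems: configuration, algorithm parameters, cameras,
// the robot protocol (Modbus TCP) and serial protocol, and starts the detection thread.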
int GrabBagPresenter::Init()
{
LOG_DEBUG("Start APP Version: %s\n", GRABBAG_FULL_VERSION_STRING);
// Initialize connection state
m_bCameraConnected = false;
m_currentWorkStatus = WorkStatus::InitIng;
m_pStatus->OnWorkStatusChanged(m_currentWorkStatus);
m_pDetectPresenter = new DetectPresenter();
// Initialize the VrConfig module
IVrConfig::CreateInstance(&m_vrConfig);
// Register for configuration change notifications
if (m_vrConfig) {
m_vrConfig->SetConfigChangeNotify(this);
}
// Get the configuration file path
QString configPath = PathManager::GetConfigFilePath();
// Load the configuration (camera list, project type, etc.)
ConfigResult configResult = m_vrConfig->LoadConfig(configPath.toStdString());
m_projectType = configResult.projectType;
int nRet = SUCCESS;
// Initialize algorithm parameters
nRet = InitAlgorithmParams();
if (nRet != 0) {
m_pStatus->OnStatusUpdate("算法参数初始化失败");
LOG_ERROR("Algorithm parameters initialization failed with error: %d\n", nRet);
} else {
m_pStatus->OnStatusUpdate("算法参数初始化成功");
LOG_INFO("Algorithm parameters initialization successful\n");
}
InitCamera(configResult.cameraList);
LOG_INFO("Camera initialization completed. Connected cameras: %zu, default camera index: %d\n",
m_vrEyeDeviceList.size(), m_currentCameraIndex);
// Initialize the robot protocol
nRet = InitRobotProtocol();
if (nRet != 0) {
m_pStatus->OnStatusUpdate("机械臂服务初始化失败");
m_bRobotConnected = false;
} else {
m_pStatus->OnStatusUpdate("机械臂服务初始化成功");
}
// Initialize the serial protocol
nRet = InitSerialProtocol();
if (nRet != 0) {
m_pStatus->OnStatusUpdate("串口服务初始化失败");
m_bSerialConnected = false;
} else {
m_pStatus->OnStatusUpdate("串口服务初始化成功");
m_bSerialConnected = true;
m_pStatus->OnSerialConnectionChanged(true);
}
// Start the algorithm detection thread; it is joined in the destructor,
// so it must not be detached here.
m_bAlgoDetectThreadRunning = true;
m_algoDetectThread = std::thread(&GrabBagPresenter::_AlgoDetectThread, this);
m_pStatus->OnStatusUpdate("设备初始化完成");
CheckAndUpdateWorkStatus();
QString str = QString("%1 配置初始化成功").arg(ProjectTypeToString(configResult.projectType).c_str());
m_pStatus->OnStatusUpdate(str.toStdString());
LOG_INFO("ConfigManager initialized successfully\n");
return SUCCESS;
}
// Camera protocol related methods
int GrabBagPresenter::InitCamera(std::vector<DeviceInfo>& cameraList)
{
// Notify the UI of the camera count
int cameraCount = static_cast<int>(cameraList.size());
// Initialize the camera list and preallocate space
m_vrEyeDeviceList.resize(cameraCount, std::make_pair("", nullptr));
for(int i = 0; i < cameraCount; i++)
{
m_vrEyeDeviceList[i] = std::make_pair(cameraList[i].name, nullptr);
}
m_pStatus->OnCameraCountChanged(cameraCount);
if(cameraCount > 0){
if (cameraCount >= 1) {
// Try to open the camera
int nRet = _OpenDevice(1, cameraList[0].name.c_str(), cameraList[0].ip.c_str(), m_projectType);
m_pStatus->OnCamera1StatusChanged(nRet == SUCCESS);
m_bCameraConnected = (nRet == SUCCESS);
}
if (cameraCount >= 2) {
// Try to open the camera
int nRet = _OpenDevice(2, cameraList[1].name.c_str(), cameraList[1].ip.c_str(), m_projectType);
m_pStatus->OnCamera2StatusChanged(nRet == SUCCESS);
m_bCameraConnected = (nRet == SUCCESS);
}
} else {
m_vrEyeDeviceList.resize(1, std::make_pair("", nullptr));
_OpenDevice(1, "相机", nullptr, m_projectType);
}
// Set the default camera index to the first connected camera
m_currentCameraIndex = 1; // camera IDs start at 1
for (int i = 0; i < static_cast<int>(m_vrEyeDeviceList.size()); i++) {
if (m_vrEyeDeviceList[i].second != nullptr) {
m_currentCameraIndex = i + 1; // found the first connected camera
break;
}
}
return SUCCESS;
}
// Initialize the robot protocol
int GrabBagPresenter::InitRobotProtocol()
{
LOG_DEBUG("Start initializing robot protocol\n");
m_pStatus->OnStatusUpdate("机械臂服务初始化...");
// Create the RobotProtocol instance
if(nullptr == m_pRobotProtocol){
m_pRobotProtocol = new RobotProtocol();
}
// Initialize the protocol service on port 5020
int nRet = m_pRobotProtocol->Initialize(5020);
// Set the connection status callback
m_pRobotProtocol->SetConnectionCallback([this](bool connected) {
this->OnRobotConnectionChanged(connected);
});
// Set the work signal callback
m_pRobotProtocol->SetWorkSignalCallback([this](bool startWork, int cameraIndex) {
return this->OnRobotWorkSignal(startWork, cameraIndex);
});
LOG_INFO("Robot protocol initialization completed successfully\n");
return nRet;
}
// Serial protocol related methods
int GrabBagPresenter::InitSerialProtocol()
{
if (m_pSerialProtocol) {
LOG_WARNING("Serial protocol already initialized\n");
return SUCCESS;
}
LOG_INFO("Initializing serial protocol\n");
// Get the serial port configuration
QString configPath = PathManager::GetConfigFilePath();
ConfigResult configResult = m_vrConfig->LoadConfig(configPath.toStdString());
// If the serial port is disabled in the configuration, do not initialize it
if (!configResult.serialConfig.enabled) {
LOG_INFO("Serial communication is disabled in configuration\n");
return ERR_CODE(DEV_ARG_INVAILD);
}
// Create the serial protocol instance
m_pSerialProtocol = new SerialProtocol();
// Set the serial callbacks
m_pSerialProtocol->SetConnectionCallback([this](bool connected) {
this->OnSerialConnectionChanged(connected);
});
m_pSerialProtocol->SetWorkSignalCallback([this](bool startWork, int cameraIndex) {
return this->OnSerialWorkSignal(startWork, cameraIndex);
});
// Open the serial port
int result = m_pSerialProtocol->OpenSerial(
configResult.serialConfig.portName,
configResult.serialConfig.baudRate,
configResult.serialConfig.dataBits,
configResult.serialConfig.stopBits,
configResult.serialConfig.parity,
configResult.serialConfig.flowControl
);
if (result == SUCCESS) {
LOG_INFO("Serial protocol initialization successful\n");
m_bSerialConnected = true;
return SUCCESS;
} else {
LOG_ERROR("Serial protocol initialization failed with error: %d\n", result);
m_bSerialConnected = false;
return result;
}
}
// Initialize algorithm parameters
int GrabBagPresenter::InitAlgorithmParams()
{
LOG_DEBUG("Start initializing algorithm parameters\n");
QString exePath = QCoreApplication::applicationFilePath();
// Clear the existing hand-eye calibration matrix list
m_clibMatrixList.clear();
// Get the hand-eye calibration file path and ensure the file exists
QString clibPath = PathManager::GetCalibrationFilePath();
LOG_INFO("Loading hand-eye matrices from: %s\n", clibPath.toStdString().c_str());
// Read the number of stored matrices
int nExistMatrixNum = CVrConvert::GetClibMatrixCount(clibPath.toStdString().c_str());
LOG_INFO("Found %d hand-eye calibration matrices\n", nExistMatrixNum);
// Load each matrix in turn
for(int matrixIndex = 0; matrixIndex < nExistMatrixNum; matrixIndex++)
{
// Build the matrix identifier
char matrixIdent[64];
#ifdef _WIN32
sprintf_s(matrixIdent, "CalibMatrixInfo_%d", matrixIndex);
#else
sprintf(matrixIdent, "CalibMatrixInfo_%d", matrixIndex);
#endif
// Create a new calibration matrix structure
CalibMatrix calibMatrix;
// Initialize to the identity matrix as a fallback
double initClibMatrix[16] = {
1.0, 0.0, 0.0, 0.0, // row 1
0.0, 1.0, 0.0, 0.0, // row 2
0.0, 0.0, 1.0, 0.0, // row 3
0.0, 0.0, 0.0, 1.0 // row 4
};
// Load the matrix data
bool loadSuccess = CVrConvert::LoadClibMatrix(clibPath.toStdString().c_str(), matrixIdent, "dCalibMatrix", calibMatrix.clibMatrix);
if(loadSuccess)
{
m_clibMatrixList.push_back(calibMatrix);
LOG_INFO("Successfully loaded matrix %d\n", matrixIndex);
// Print the matrix content
QString clibMatrixStr;
LOG_INFO("Matrix %d content:\n", matrixIndex);
for (int i = 0; i < 4; ++i) {
clibMatrixStr.clear();
for (int j = 0; j < 4; ++j) {
clibMatrixStr += QString::asprintf("%8.4f ", calibMatrix.clibMatrix[i * 4 + j]);
}
LOG_INFO(" %s\n", clibMatrixStr.toStdString().c_str());
}
}
else
{
LOG_WARNING("Failed to load matrix %d, using identity matrix\n", matrixIndex);
// If loading failed, fall back to the identity matrix
memcpy(calibMatrix.clibMatrix, initClibMatrix, sizeof(initClibMatrix));
m_clibMatrixList.push_back(calibMatrix);
}
}
LOG_INFO("Total loaded %zu hand-eye calibration matrices\n", m_clibMatrixList.size());
// Get the configuration file path
QString configPath = PathManager::GetConfigFilePath();
LOG_INFO("Loading config: %s\n", configPath.toStdString().c_str());
// Load the configuration file
ConfigResult configResult = m_vrConfig->LoadConfig(configPath.toStdString());
const VrAlgorithmParams& xmlParams = configResult.algorithmParams;
// Save the debug parameters
m_debugParam = configResult.debugParam;
LOG_INFO("Loaded XML params - Bag: L=%.1f, W=%.1f, H=%.1f\n", xmlParams.bagParam.bagL, xmlParams.bagParam.bagW, xmlParams.bagParam.bagH);
LOG_INFO("Loaded XML params - Pile: L=%.1f, W=%.1f, H=%.1f\n",xmlParams.pileParam.pileL, xmlParams.pileParam.pileW, xmlParams.pileParam.pileH);
// Initialize the algorithm parameter structure
memset(&m_algoParam, 0, sizeof(SG_bagPositionParam));
// Set woven-bag parameters
m_algoParam.bagParam.bagL = xmlParams.bagParam.bagL;
m_algoParam.bagParam.bagW = xmlParams.bagParam.bagW;
m_algoParam.bagParam.bagH = xmlParams.bagParam.bagH;
// Set filter parameters
m_algoParam.filterParam.continuityTh = xmlParams.filterParam.continuityTh;
m_algoParam.filterParam.outlierTh = xmlParams.filterParam.outlierTh;
// Set corner feature parameters
m_algoParam.cornerParam.cornerTh = xmlParams.cornerParam.cornerTh;
m_algoParam.cornerParam.scale = xmlParams.cornerParam.scale;
m_algoParam.cornerParam.minEndingGap = xmlParams.cornerParam.minEndingGap;
m_algoParam.cornerParam.minEndingGap_z = xmlParams.cornerParam.minEndingGap_z;
m_algoParam.cornerParam.jumpCornerTh_1 = xmlParams.cornerParam.jumpCornerTh_1;
m_algoParam.cornerParam.jumpCornerTh_2 = xmlParams.cornerParam.jumpCornerTh_2;
// Set growth parameters
m_algoParam.growParam.maxLineSkipNum = xmlParams.growParam.maxLineSkipNum;
m_algoParam.growParam.yDeviation_max = xmlParams.growParam.yDeviation_max;
m_algoParam.growParam.maxSkipDistance = xmlParams.growParam.maxSkipDistance;
m_algoParam.growParam.zDeviation_max = xmlParams.growParam.zDeviation_max;
m_algoParam.growParam.minLTypeTreeLen = xmlParams.growParam.minLTypeTreeLen;
m_algoParam.growParam.minVTypeTreeLen = xmlParams.growParam.minVTypeTreeLen;
// Set color parameters
m_hsvCmpParam.hueTh = xmlParams.hsvCmpParam.hueTh;
m_hsvCmpParam.saturateTh = xmlParams.hsvCmpParam.saturateTh;
m_hsvCmpParam.FBVldPtRatioTh = xmlParams.hsvCmpParam.FBVldPtRatioTh;
m_hsvCmpParam.frontVldPtGreater = xmlParams.hsvCmpParam.frontVldPtGreater;
m_hsvCmpParam.front_upVldPtGreater = xmlParams.hsvCmpParam.front_upVldPtGreater;
m_hsvCmpParam.back_upVldPtGreater = xmlParams.hsvCmpParam.back_upVldPtGreater;
m_rgbColorPattern.r = xmlParams.rgbColorPattern.r;
m_rgbColorPattern.g = xmlParams.rgbColorPattern.g;
m_rgbColorPattern.b = xmlParams.rgbColorPattern.b;
memcpy(m_frontColorTemplate, xmlParams.colorTemplateParam.frontColorTemplate, RGN_HIST_SIZE * sizeof(double));
memcpy(m_backColorTemplate, xmlParams.colorTemplateParam.backColorTemplate, RGN_HIST_SIZE * sizeof(double));
// Set plane calibration parameters (keep the configuration of all cameras)
m_cameraCalibParams = xmlParams.planeCalibParam.cameraCalibParams;
LOG_INFO("projectType: %s\n", ProjectTypeToString(m_projectType).c_str());
LOG_INFO("Algorithm parameters initialized successfully:\n");
LOG_INFO(" Bag: L=%.1f, W=%.1f, H=%.1f\n", m_algoParam.bagParam.bagL, m_algoParam.bagParam.bagW, m_algoParam.bagParam.bagH);
LOG_INFO(" Filter: continuityTh=%.1f, outlierTh=%.1f\n", m_algoParam.filterParam.continuityTh, m_algoParam.filterParam.outlierTh);
LOG_INFO(" Corner: minEndingGap=%.1f, minEndingGap_z=%.1f, scale=%.1f, cornerTh=%.1f, jumpCornerTh_1=%.1f, jumpCornerTh_2=%.1f\n",
m_algoParam.cornerParam.minEndingGap, m_algoParam.cornerParam.minEndingGap_z, m_algoParam.cornerParam.scale, m_algoParam.cornerParam.cornerTh,
m_algoParam.cornerParam.jumpCornerTh_1, m_algoParam.cornerParam.jumpCornerTh_2);
LOG_INFO(" Grow: minVTypeTreeLen=%.1f, minLTypeTreeLen=%.1f yDeviation_max=%.1f, zDeviation_max=%.1f, maxLineSkipNum=%d, maxSkipDistance=%.1f\n",
m_algoParam.growParam.minVTypeTreeLen, m_algoParam.growParam.minLTypeTreeLen,
m_algoParam.growParam.yDeviation_max, m_algoParam.growParam.zDeviation_max,
m_algoParam.growParam.maxLineSkipNum, m_algoParam.growParam.maxSkipDistance);
// Print the plane calibration parameters for all cameras
LOG_INFO("Loading plane calibration parameters for all cameras:\n");
for (const auto& cameraParam : m_cameraCalibParams) {
LOG_INFO("Camera %d (%s) calibration parameters:\n",
cameraParam.cameraIndex, cameraParam.cameraName.c_str());
LOG_INFO(" Is calibrated: %s\n", cameraParam.isCalibrated ? "YES" : "NO");
LOG_INFO(" Plane height: %.3f\n", cameraParam.planeHeight);
LOG_INFO(" Plane calibration matrix:\n");
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[0], cameraParam.planeCalib[1], cameraParam.planeCalib[2]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[3], cameraParam.planeCalib[4], cameraParam.planeCalib[5]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[6], cameraParam.planeCalib[7], cameraParam.planeCalib[8]);
LOG_INFO(" Inverse rotation matrix:\n");
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[0], cameraParam.invRMatrix[1], cameraParam.invRMatrix[2]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[3], cameraParam.invRMatrix[4], cameraParam.invRMatrix[5]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[6], cameraParam.invRMatrix[7], cameraParam.invRMatrix[8]);
LOG_INFO(" --------------------------------\n");
}
LOG_INFO("HSV color comparison parameters:\n");
LOG_INFO(" hueTh: %.1f\n", m_hsvCmpParam.hueTh);
LOG_INFO(" saturateTh: %.1f\n", m_hsvCmpParam.saturateTh);
LOG_INFO(" FBVldPtRatioTh: %.5f\n", m_hsvCmpParam.FBVldPtRatioTh);
LOG_INFO(" frontVldPtGreater: %s\n", m_hsvCmpParam.frontVldPtGreater ? "true" : "false");
LOG_INFO(" front_upVldPtGreater: %s\n", m_hsvCmpParam.front_upVldPtGreater ? "true" : "false");
LOG_INFO(" back_upVldPtGreater: %s\n", m_hsvCmpParam.back_upVldPtGreater ? "true" : "false");
return SUCCESS;
}
// Hand-eye calibration matrix management
const CalibMatrix GrabBagPresenter::GetClibMatrix(int index) const
{
CalibMatrix clibMatrix;
double initClibMatrix[16] = {
1.0, 0.0, 0.0, 0.0, // row 1
0.0, 1.0, 0.0, 0.0, // row 2
0.0, 0.0, 1.0, 0.0, // row 3
0.0, 0.0, 0.0, 1.0 // row 4
};
memcpy(clibMatrix.clibMatrix, initClibMatrix, sizeof(initClibMatrix));
if (index >= 0 && index < static_cast<int>(m_clibMatrixList.size())) {
clibMatrix = m_clibMatrixList[index];
} else {
LOG_WARNING("Invalid hand-eye calibration matrix index: %d, returning identity matrix\n", index);
}
return clibMatrix;
}
// Robot protocol callback implementations
void GrabBagPresenter::OnRobotConnectionChanged(bool connected)
{
LOG_INFO("Robot connection status changed: %s\n", (connected ? "connected" : "disconnected"));
// Update the robot connection state
m_bRobotConnected = connected;
if (m_pStatus) {
m_pStatus->OnRobotConnectionChanged(connected);
// Send detailed status information to the UI
if (connected) {
m_pStatus->OnStatusUpdate("机械臂连接成功,通信正常");
} else {
m_pStatus->OnStatusUpdate("机械臂连接断开,请检查网络连接");
}
}
// Check and update the work status
CheckAndUpdateWorkStatus();
}
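// Handles a start/stop work signal from the robot protocol: validates the camera,
// then starts or stops detection accordingly. Returns true on success.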
bool GrabBagPresenter::OnRobotWorkSignal(bool startWork, int cameraIndex)
{
LOG_INFO("Received robot work signal: %s for camera index: %d\n", (startWork ? "start work" : "stop work"), cameraIndex);
if(!_SinglePreDetection(cameraIndex)){
return false;
}
int nRet = _SingleDetection(cameraIndex, startWork);
if(nRet != SUCCESS){
m_currentWorkStatus = WorkStatus::Ready;
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
}
return nRet == SUCCESS;
}
void GrabBagPresenter::SetStatusCallback(IYGrabBagStatus* status)
{
m_pStatus = status;
}
// Start detection with the given camera (cameraIdx is 1-based; pass -1 to keep the current camera)
int GrabBagPresenter::StartDetection(int cameraIdx, bool isAuto)
{
LOG_INFO("--------------------------------\n");
LOG_INFO("Start detection with camera index: %d\n", cameraIdx);
// Check whether the device is ready
if (isAuto && m_currentWorkStatus != WorkStatus::Ready) {
LOG_INFO("Device not ready, cannot start detection\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("设备未准备就绪,无法开始检测");
}
return ERR_CODE(DEV_BUSY);
}
// Save the camera ID currently in use (IDs start at 1)
if(-1 != cameraIdx){
m_currentCameraIndex = cameraIdx;
}
int cameraIndex = m_currentCameraIndex;
m_currentWorkStatus = WorkStatus::Working;
// Notify the UI that the work status changed to "Working"
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(WorkStatus::Working);
}
// Set the robot work status to busy
if (m_pRobotProtocol) {
m_pRobotProtocol->SetWorkStatus(RobotProtocol::WORK_STATUS_BUSY);
}
if(m_vrEyeDeviceList.empty()){
LOG_ERROR("No camera device found\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("未找到相机设备");
}
return ERR_CODE(DEV_NOT_FIND);
}
// Clear the detection data cache (release previously allocated memory)
_ClearDetectionDataCache();
int nRet = SUCCESS;
// Decide which camera to start based on the parameter
// Start the specified camera; cameraIndex is a 1-based camera ID
int arrayIndex = cameraIndex - 1; // convert to a 0-based array index
// Check whether the camera is connected
if (arrayIndex < 0 || arrayIndex >= static_cast<int>(m_vrEyeDeviceList.size()) || m_vrEyeDeviceList[arrayIndex].second == nullptr) {
LOG_ERROR("Camera %d is not connected\n", cameraIndex);
QString cameraName = (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first) : QString("相机%1").arg(cameraIndex);
m_pStatus->OnStatusUpdate(QString("%1 未连接").arg(cameraName).toStdString());
return ERR_CODE(DEV_NOT_FIND);
}
if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* pDevice = m_vrEyeDeviceList[arrayIndex].second;
EVzResultDataType eDataType = keResultDataType_Position;
if(m_projectType == ProjectType::DirectBag){
eDataType = keResultDataType_PointXYZRGBA;
}
pDevice->SetStatusCallback(&GrabBagPresenter::_StaticCameraNotify, this);
// Start detection
nRet = pDevice->StartDetect(&GrabBagPresenter::_StaticDetectionCallback, eDataType, this);
LOG_INFO("Camera ID %d start detection nRet: %d\n", cameraIndex, nRet);
if (nRet == SUCCESS) {
QString cameraName = QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first);
m_pStatus->OnStatusUpdate(QString("启动%1检测成功").arg(cameraName).toStdString());
} else {
LOG_ERROR("Camera ID %d start detection failed with error: %d\n", cameraIndex, nRet);
QString cameraName = QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first);
m_pStatus->OnStatusUpdate(QString("启动%1检测失败[%2]").arg(cameraName).arg(nRet).toStdString());
}
} else {
LOG_ERROR("Invalid camera ID: %d, valid range is 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
m_pStatus->OnStatusUpdate(QString("无效的相机ID: %1有效范围: 1-%2").arg(cameraIndex).arg(m_vrEyeDeviceList.size()).toStdString());
nRet = ERR_CODE(DEV_NOT_FIND);
}
return nRet;
}
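// Stops detection on all cameras, restores the work status based on the current
// connection state, and sets the robot work status back to idle.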
int GrabBagPresenter::StopDetection()
{
LOG_INFO("Stop detection\n");
// Stop detection on all cameras
for (size_t i = 0; i < m_vrEyeDeviceList.size(); ++i) {
IVrEyeDevice* pDevice = m_vrEyeDeviceList[i].second;
if (pDevice) {
int ret = pDevice->StopDetect();
if (ret == 0) {
LOG_INFO("Camera %zu stop detection successfully\n", i + 1);
} else {
LOG_WARNING("Camera %zu stop detection failed, error code: %d\n", i + 1, ret);
}
}
}
// Notify the UI that the work status changed to "Ready" (if the devices are connected)
if (m_pStatus) {
// Check device connection state to decide the status after stopping
if (m_bCameraConnected && (m_bRobotConnected || m_bSerialConnected)) {
m_currentWorkStatus = WorkStatus::Ready;
m_pStatus->OnWorkStatusChanged(WorkStatus::Ready);
} else {
m_currentWorkStatus = WorkStatus::Error;
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
m_pStatus->OnStatusUpdate("检测已停止");
}
// Set the robot work status to idle
if (m_pRobotProtocol) {
m_pRobotProtocol->SetWorkStatus(RobotProtocol::WORK_STATUS_IDLE);
}
return 0;
}
// Load debug data from a file and run detection on it
int GrabBagPresenter::LoadDebugDataAndDetect(const std::string& filePath)
{
LOG_INFO("Loading debug data from file: %s\n", filePath.c_str());
m_currentWorkStatus = WorkStatus::Working;
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(WorkStatus::Working);
std::string fileName = QFileInfo(QString::fromStdString(filePath)).fileName().toStdString();
m_pStatus->OnStatusUpdate(QString("加载文件:%1").arg(fileName.c_str()).toStdString());
}
// Set the robot work status to busy
if (m_pRobotProtocol) {
m_pRobotProtocol->SetWorkStatus(RobotProtocol::WORK_STATUS_BUSY);
}
int lineNum = 0;
float scanSpeed = 0.0f;
int maxTimeStamp = 0;
int clockPerSecond = 0;
int result = SUCCESS;
// Clear the existing detection data cache
_ClearDetectionDataCache();
{
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
// Use the unified LoadLaserScanData interface, which detects the file format automatically
result = m_dataLoader.LoadLaserScanData(filePath, m_detectionDataCache, lineNum, scanSpeed, maxTimeStamp, clockPerSecond);
}
if (result != SUCCESS) {
LOG_ERROR("Failed to load debug data: %s\n", m_dataLoader.GetLastError().c_str());
if (m_pStatus) {
m_pStatus->OnStatusUpdate("调试数据加载失败: " + m_dataLoader.GetLastError());
}
return result;
}
LOG_INFO("Successfully loaded %d lines of debug data\n", lineNum);
if (m_pStatus) {
m_pStatus->OnStatusUpdate(QString("成功加载 %1 行调试数据").arg(lineNum).toStdString());
}
// Run the detection task and wait for it to complete
result = _DetectTask();
return result;
}
// Set the status callback for all cameras
void GrabBagPresenter::SetCameraStatusCallback(VzNL_OnNotifyStatusCBEx fNotify, void* param)
{
for (size_t i = 0; i < m_vrEyeDeviceList.size(); i++) {
IVrEyeDevice* pDevice = m_vrEyeDeviceList[i].second;
if (pDevice) {
pDevice->SetStatusCallback(fNotify, param);
LOG_DEBUG("Status callback set for camera %zu\n", i + 1);
}
}
}
// Open a camera device
int GrabBagPresenter::_OpenDevice(int cameraIndex, const char* cameraName, const char* cameraIp, ProjectType& projectType)
{
IVrEyeDevice* pDevice = nullptr;
IVrEyeDevice::CreateObject(&pDevice);
int nRet = pDevice->InitDevice();
ERR_CODE_RETURN(nRet);
// Set the status callback first
nRet = pDevice->SetStatusCallback(&GrabBagPresenter::_StaticCameraNotify, this);
LOG_DEBUG("SetStatusCallback result: %d\n", nRet);
ERR_CODE_RETURN(nRet);
// Try to open the camera
nRet = pDevice->OpenDevice(cameraIp, ProjectType::DirectBag == projectType);
// Report the camera status based on the open result
bool cameraConnected = (SUCCESS == nRet);
if(!cameraConnected){
delete pDevice; // release the device that failed to open
pDevice = nullptr;
}
int arrIdx = cameraIndex - 1;
if(static_cast<int>(m_vrEyeDeviceList.size()) > arrIdx){
m_vrEyeDeviceList[arrIdx] = std::make_pair(cameraName, pDevice); // store at the camera's array slot
}
m_pStatus->OnCamera1StatusChanged(cameraConnected);
m_pStatus->OnStatusUpdate(cameraConnected ? "相机连接成功" : "相机连接失败");
m_bCameraConnected = cameraConnected;
return nRet;
}
// Check whether detection can be started for the given camera
bool GrabBagPresenter::_SinglePreDetection(int cameraIndex)
{
if(m_vrEyeDeviceList.empty()){
LOG_ERROR("No camera device found\n");
if (nullptr != m_pStatus) {
m_pStatus->OnStatusUpdate("未找到相机设备");
}
return false;
}
if(cameraIndex < 1 || cameraIndex > static_cast<int>(m_vrEyeDeviceList.size())){
LOG_ERROR("Invalid camera index: %d, valid range: 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
return false;
}
if(m_vrEyeDeviceList[cameraIndex - 1].second == nullptr){
LOG_ERROR("Camera %d is not connected\n", cameraIndex);
return false;
}
return true;
}
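// Starts or stops detection on the given camera in response to an external work signal
// and reports the action to the UI.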
int GrabBagPresenter::_SingleDetection(int cameraIndex, bool isStart)
{
int nRet = SUCCESS;
if (isStart) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
QString message = QString("收到信号,启动%1检测").arg(cameraName);
if (nullptr != m_pStatus) {
m_pStatus->OnStatusUpdate(message.toStdString());
}
nRet = StartDetection(cameraIndex);
} else {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
QString message = QString("收到信号,停止%1检测").arg(cameraName);
if (nullptr != m_pStatus) {
m_pStatus->OnStatusUpdate(message.toStdString());
}
nRet = StopDetection();
}
return nRet;
}
// Static callback implementations
void GrabBagPresenter::_StaticCameraNotify(EVzDeviceWorkStatus eStatus, void* pExtData, unsigned int nDataLength, void* pInfoParam)
{
// pInfoParam carries the this pointer; cast it back to GrabBagPresenter*
GrabBagPresenter* pThis = reinterpret_cast<GrabBagPresenter*>(pInfoParam);
if (pThis)
{
// Forward to the non-static member function
pThis->_CameraNotify(eStatus, pExtData, nDataLength, pInfoParam);
}
}
void GrabBagPresenter::_CameraNotify(EVzDeviceWorkStatus eStatus, void *pExtData, unsigned int nDataLength, void *pInfoParam)
{
LOG_DEBUG("[Camera Notify] received: status=%d\n", (int)eStatus);
switch (eStatus) {
case EVzDeviceWorkStatus::keDeviceWorkStatus_Offline:
{
LOG_WARNING("[Camera Notify] Camera device offline/disconnected\n");
// Update the camera connection state
m_bCameraConnected = false;
// Notify the UI of the camera status change
if (m_pStatus) {
// TODO: determine which camera went offline; for now only camera 1's status is updated.
// In practice this may need to be distinguished via pInfoParam or some other means.
m_pStatus->OnCamera1StatusChanged(false);
m_pStatus->OnStatusUpdate("相机设备离线");
}
// Check and update the work status
CheckAndUpdateWorkStatus();
break;
}
case EVzDeviceWorkStatus::keDeviceWorkStatus_Eye_Reconnect:
{
LOG_INFO("[Camera Notify] Camera device online/connected\n");
// Update the camera connection state
m_bCameraConnected = true;
// Notify the UI of the camera status change
if (m_pStatus) {
m_pStatus->OnCamera1StatusChanged(true);
m_pStatus->OnStatusUpdate("相机设备已连接");
}
// Check and update the work status
CheckAndUpdateWorkStatus();
break;
}
case EVzDeviceWorkStatus::keDeviceWorkStatus_Device_Swing_Finish:
{
LOG_INFO("[Camera Notify] Received scan finish signal from camera\n");
// Send a status hint to the UI
if (m_pStatus) {
m_pStatus->OnStatusUpdate("相机扫描完成,开始数据处理...");
}
// Notify the detection thread to start processing
m_algoDetectCondition.notify_one();
break;
}
default:
break;
}
}
// Detection data callback (static version)
void GrabBagPresenter::_StaticDetectionCallback(EVzResultDataType eDataType, SVzLaserLineData* pLaserLinePoint, void* pUserData)
{
GrabBagPresenter* pThis = reinterpret_cast<GrabBagPresenter*>(pUserData);
if (pThis) {
pThis->_DetectionCallback(eDataType, pLaserLinePoint, pUserData);
}
}
// Detection data callback (instance version)
void GrabBagPresenter::_DetectionCallback(EVzResultDataType eDataType, SVzLaserLineData* pLaserLinePoint, void* pUserData)
{
if (!pLaserLinePoint) {
LOG_WARNING("[Detection Callback] pLaserLinePoint is null\n");
return;
}
if (pLaserLinePoint->nPointCount <= 0) {
LOG_WARNING("[Detection Callback] Point count is zero or negative: %d\n", pLaserLinePoint->nPointCount);
return;
}
if (!pLaserLinePoint->p3DPoint) {
LOG_WARNING("[Detection Callback] p3DPoint is null\n");
return;
}
// Store the SVzLaserLineData directly in the unified cache
SVzLaserLineData lineData;
memset(&lineData, 0, sizeof(SVzLaserLineData));
// Allocate and copy the point cloud data according to the data type
if (eDataType == keResultDataType_Position) {
// Copy SVzNL3DPosition data
if (pLaserLinePoint->p3DPoint && pLaserLinePoint->nPointCount > 0) {
lineData.p3DPoint = new SVzNL3DPosition[pLaserLinePoint->nPointCount];
if (lineData.p3DPoint) {
memcpy(lineData.p3DPoint, pLaserLinePoint->p3DPoint, sizeof(SVzNL3DPosition) * pLaserLinePoint->nPointCount);
}
lineData.p2DPoint = new SVzNL2DPosition[pLaserLinePoint->nPointCount];
if (lineData.p2DPoint) {
memcpy(lineData.p2DPoint, pLaserLinePoint->p2DPoint, sizeof(SVzNL2DPosition) * pLaserLinePoint->nPointCount);
}
}
} else if (eDataType == keResultDataType_PointXYZRGBA) {
// Copy SVzNLPointXYZRGBA data
if (pLaserLinePoint->p3DPoint && pLaserLinePoint->nPointCount > 0) {
lineData.p3DPoint = new SVzNLPointXYZRGBA[pLaserLinePoint->nPointCount];
if (lineData.p3DPoint) {
memcpy(lineData.p3DPoint, pLaserLinePoint->p3DPoint, sizeof(SVzNLPointXYZRGBA) * pLaserLinePoint->nPointCount);
}
lineData.p2DPoint = new SVzNL2DLRPoint[pLaserLinePoint->nPointCount];
if (lineData.p2DPoint) {
memcpy(lineData.p2DPoint, pLaserLinePoint->p2DPoint, sizeof(SVzNL2DLRPoint) * pLaserLinePoint->nPointCount);
}
}
}
lineData.nPointCount = pLaserLinePoint->nPointCount;
lineData.llTimeStamp = pLaserLinePoint->llTimeStamp;
lineData.llFrameIdx = pLaserLinePoint->llFrameIdx;
lineData.nEncodeNo = pLaserLinePoint->nEncodeNo;
lineData.fSwingAngle = pLaserLinePoint->fSwingAngle;
lineData.bEndOnceScan = pLaserLinePoint->bEndOnceScan;
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
m_detectionDataCache.push_back(std::make_pair(eDataType, lineData));
}
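// Recomputes the overall work status: Ready when a camera and at least one of the
// robot or serial links are connected, otherwise Error.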
void GrabBagPresenter::CheckAndUpdateWorkStatus()
{
if (m_bCameraConnected && (m_bRobotConnected || m_bSerialConnected)) {
m_currentWorkStatus = WorkStatus::Ready;
} else {
m_currentWorkStatus = WorkStatus::Error;
}
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(m_currentWorkStatus);
}
}
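// Worker thread: waits until a scan finishes (work status switches to Working),
// then runs the detection task and restores the Ready status.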
void GrabBagPresenter::_AlgoDetectThread()
{
while(m_bAlgoDetectThreadRunning)
{
std::unique_lock<std::mutex> lock(m_algoDetectMutex);
// Also wake up when the thread is asked to stop, so the destructor can join it.
m_algoDetectCondition.wait(lock, [this]() {
return !m_bAlgoDetectThreadRunning || m_currentWorkStatus == WorkStatus::Working;
});
if(!m_bAlgoDetectThreadRunning){
break;
}
// Run the detection task on the cached scan data
int nRet = _DetectTask();
LOG_INFO("DetectTask result: %d\n", nRet);
if(nRet != SUCCESS && m_pStatus){
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
LOG_DEBUG("Algo Thread end\n");
m_currentWorkStatus = WorkStatus::Ready;
}
}
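// Runs the bag-positioning algorithm on the cached scan lines, reports the result
// to the UI, and forwards the target positions to the robot and serial protocols.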
int GrabBagPresenter::_DetectTask()
{
LOG_INFO("[Algo Thread] Start real detection task using algorithm\n");
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
// Get the cached point cloud data
if (m_detectionDataCache.empty()) {
LOG_WARNING("No cached detection data available\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("无缓存的检测数据");
}
return ERR_CODE(DEV_DATA_INVALID);
}
// Prepare the algorithm input data
unsigned int lineNum = static_cast<unsigned int>(m_detectionDataCache.size());
if(m_pStatus){
m_pStatus->OnStatusUpdate("扫描线数:" + std::to_string(lineNum) + ",正在算法检测...");
}
CVrTimeUtils oTimeUtils;
// Get the plane calibration parameters for the current camera
SSG_planeCalibPara currentCameraCalibParam = _GetCameraCalibParam(m_currentCameraIndex);
// Get the hand-eye calibration matrix currently in use
const CalibMatrix currentClibMatrix = GetClibMatrix(m_currentCameraIndex - 1);
DetectionResult detectionResult;
int nRet = m_pDetectPresenter->DetectBag(m_currentCameraIndex, m_detectionDataCache, m_projectType, m_algoParam,
currentCameraCalibParam, m_debugParam, m_dataLoader, currentClibMatrix.clibMatrix,
m_hsvCmpParam, m_rgbColorPattern, m_frontColorTemplate, m_backColorTemplate, detectionResult);
// Processing is selected by project type inside DetectBag; report the result to the UI
if (m_pStatus) {
QString err = QString("错误:%1").arg(nRet);
m_pStatus->OnStatusUpdate(QString("检测%1").arg(SUCCESS == nRet ? "成功": err).toStdString());
}
ERR_CODE_RETURN(nRet);
LOG_INFO("[Algo Thread] sg_getBagPosition detected %zu objects time : %.2f ms\n", detectionResult.positions.size(), oTimeUtils.GetElapsedTimeInMilliSec());
// Attach the camera index to the detection result
detectionResult.cameraIndex = m_currentCameraIndex;
// Invoke the detection result callback
m_pStatus->OnDetectionResult(detectionResult);
// Update the status message
QString statusMsg = QString("检测完成,发现%1个目标").arg(detectionResult.positions.size());
m_pStatus->OnStatusUpdate(statusMsg.toStdString());
// Update the robot protocol state (send the converted target position data)
_SendDetectionResultToRobot(detectionResult, m_currentCameraIndex);
// After detection finishes, update the work status to "Completed"
if (m_pStatus) {
m_currentWorkStatus = WorkStatus::Completed;
m_pStatus->OnWorkStatusChanged(WorkStatus::Completed);
}
// Set the robot work status to the corresponding camera-done status (camera IDs start at 1)
if (m_pRobotProtocol) {
uint16_t workStatus = (m_currentCameraIndex == 1) ?
RobotProtocol::WORK_STATUS_CAMERA1_DONE :
RobotProtocol::WORK_STATUS_CAMERA2_DONE;
m_pRobotProtocol->SetWorkStatus(workStatus);
}
// Return to the ready state
m_currentWorkStatus = WorkStatus::Ready;
return SUCCESS;
}
// Release cached detection data
void GrabBagPresenter::_ClearDetectionDataCache()
{
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
// Free the loaded data
m_dataLoader.FreeLaserScanData(m_detectionDataCache);
LOG_DEBUG("Detection data cache cleared successfully\n");
}
// Send the detection result to the robot
void GrabBagPresenter::_SendDetectionResultToRobot(const DetectionResult& detectionResult, int cameraIndex)
{
LOG_INFO("Sending detection result for camera %d\n", cameraIndex);
// Prepare the multi-target data structure
MultiTargetData multiTargetData;
// Check whether there are any detection results
if (detectionResult.positions.empty()) {
LOG_INFO("No objects detected, sending empty result\n");
// Send an empty multi-target payload
multiTargetData.count = 0;
multiTargetData.targets.clear();
} else {
// Get the detected target positions (already in robot coordinates)
const auto& positions = detectionResult.positions;
multiTargetData.count = static_cast<uint16_t>(positions.size());
// Use the already-converted robot coordinates directly
for (size_t i = 0; i < positions.size(); i++) {
const GrabBagPosition& pos = positions[i];
// Use the converted coordinate data directly
TargetPosition robotTarget;
robotTarget.x = pos.x; // converted X coordinate
robotTarget.y = pos.y; // converted Y coordinate
robotTarget.z = pos.z; // converted Z coordinate
robotTarget.rz = pos.yaw; // yaw angle
// Append to the multi-target data
multiTargetData.targets.push_back(robotTarget);
}
}
// Send to the robot over the network (Modbus TCP)
bool robotSent = false;
if (m_pRobotProtocol && (m_bRobotConnected || m_bSerialConnected)) {
int result = m_pRobotProtocol->SetMultiTargetData(multiTargetData, cameraIndex);
robotSent = (result == SUCCESS);
LOG_INFO("[Robot TCP] SetMultiTargetData result: %d for camera ID: %d\n", result, cameraIndex);
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
if (result != SUCCESS) {
m_pStatus->OnStatusUpdate(QString("写坐标给机械臂失败,%1").arg(cameraName).toStdString());
} else {
m_pStatus->OnStatusUpdate(QString("写坐标给机械臂成功,%1").arg(cameraName).toStdString());
}
}
} else {
LOG_WARNING("Robot TCP protocol not available\n");
}
// Send over the serial port
bool serialSent = false;
if (m_pSerialProtocol && m_bSerialConnected) {
int result = m_pSerialProtocol->SendMultiTargetData(multiTargetData, cameraIndex);
serialSent = (result == SUCCESS);
LOG_INFO("[Serial] SendMultiTargetData result: %d for camera ID: %d\n", result, cameraIndex);
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
if (result != SUCCESS) {
m_pStatus->OnStatusUpdate(QString("写坐标给串口失败,%1").arg(cameraName).toStdString());
} else {
m_pStatus->OnStatusUpdate(QString("写坐标给串口成功,%1").arg(cameraName).toStdString());
}
}
} else {
LOG_WARNING("Serial protocol not available\n");
}
// Summarize the send status
if (robotSent || serialSent) {
LOG_INFO("Detection result sent successfully (Robot TCP: %s, Serial: %s)\n",
robotSent ? "Yes" : "No", serialSent ? "Yes" : "No");
} else {
LOG_WARNING("Failed to send detection result via any protocol\n");
}
}
// Configuration change notification interface implementation
void GrabBagPresenter::OnConfigChanged(const ConfigResult& configResult)
{
LOG_INFO("Configuration changed notification received, reloading algorithm parameters\n");
// Update the debug parameters
m_debugParam = configResult.debugParam;
// Reinitialize the algorithm parameters
int result = InitAlgorithmParams();
if (result == SUCCESS) {
LOG_INFO("Algorithm parameters reloaded successfully after config change\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("配置已更新,算法参数重新加载成功");
}
} else {
LOG_ERROR("Failed to reload algorithm parameters after config change, error: %d\n", result);
if (m_pStatus) {
m_pStatus->OnStatusUpdate("配置更新后算法参数重新加载失败");
}
}
}
// Get the plane calibration parameters for the given camera index
SSG_planeCalibPara GrabBagPresenter::_GetCameraCalibParam(int cameraIndex)
{
// Look up the plane calibration parameters for the specified camera index
SSG_planeCalibPara calibParam;
// Default to the identity matrix (uncalibrated state)
double identityMatrix[9] = {1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0};
for (int i = 0; i < 9; i++) {
calibParam.planeCalib[i] = identityMatrix[i];
calibParam.invRMatrix[i] = identityMatrix[i];
}
calibParam.planeHeight = -1.0; // use the default height
for (const auto& cameraParam : m_cameraCalibParams) {
if (cameraParam.cameraIndex == cameraIndex) {
// Use the calibration matrix or the identity matrix depending on the isCalibrated flag
if (cameraParam.isCalibrated) {
// Use the actual calibration matrix
for (int i = 0; i < 9; i++) {
calibParam.planeCalib[i] = cameraParam.planeCalib[i];
calibParam.invRMatrix[i] = cameraParam.invRMatrix[i];
}
calibParam.planeHeight = cameraParam.planeHeight;
}
}
}
return calibParam;
}
// IConfigChangeListener interface implementation
void GrabBagPresenter::OnSystemConfigChanged(const SystemConfig& config)
{
LOG_INFO("System configuration changed, applying new configuration\n");
// Update the internal configuration state
if (m_vrConfig) {
// Optionally reinitialize the algorithm parameters
InitAlgorithmParams();
}
if (m_pStatus) {
m_pStatus->OnStatusUpdate("系统配置已更新");
}
}
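// Applies updated camera UI parameters (exposure, gain, frame rate, swing speed and angle)
// to the corresponding camera device and reports the result.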
void GrabBagPresenter::OnCameraParamChanged(int cameraIndex, const CameraUIParam& cameraParam)
{
LOG_INFO("Camera %d parameters changed: expose=%.2f, gain=%.2f, frameRate=%.2f\n",
cameraIndex, cameraParam.exposeTime, cameraParam.gain, cameraParam.frameRate);
// Apply the camera parameters to the actual device
if (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* device = m_vrEyeDeviceList[cameraIndex - 1].second;
if (device) {
// Set the exposure time
if (cameraParam.exposeTime > 0) {
unsigned int exposeTime = static_cast<unsigned int>(cameraParam.exposeTime);
int ret = device->SetEyeExpose(exposeTime);
if (ret == SUCCESS) {
LOG_INFO("Set expose time %.2f for camera %d successfully\n", cameraParam.exposeTime, cameraIndex);
} else {
LOG_ERROR("Failed to set expose time for camera %d, error: %d\n", cameraIndex, ret);
}
}
// Set the gain
if (cameraParam.gain > 0) {
unsigned int gain = static_cast<unsigned int>(cameraParam.gain);
int ret = device->SetEyeGain(gain);
if (ret == SUCCESS) {
LOG_INFO("Set gain %.2f for camera %d successfully\n", cameraParam.gain, cameraIndex);
} else {
LOG_ERROR("Failed to set gain for camera %d, error: %d\n", cameraIndex, ret);
}
}
// Set the frame rate
if (cameraParam.frameRate > 0) {
int frameRate = static_cast<int>(cameraParam.frameRate);
int ret = device->SetFrame(frameRate);
if (ret == SUCCESS) {
LOG_INFO("Set frame rate %.2f for camera %d successfully\n", cameraParam.frameRate, cameraIndex);
} else {
LOG_ERROR("Failed to set frame rate for camera %d, error: %d\n", cameraIndex, ret);
}
}
// Set the swing speed
if (cameraParam.swingSpeed > 0) {
float swingSpeed = static_cast<float>(cameraParam.swingSpeed);
int ret = device->SetSwingSpeed(swingSpeed);
if (ret == SUCCESS) {
LOG_INFO("Set swing speed %.2f for camera %d successfully\n", cameraParam.swingSpeed, cameraIndex);
} else {
LOG_ERROR("Failed to set swing speed for camera %d, error: %d\n", cameraIndex, ret);
}
}
// Set the swing angle
if (cameraParam.swingStartAngle != cameraParam.swingStopAngle) {
float startAngle = static_cast<float>(cameraParam.swingStartAngle);
float stopAngle = static_cast<float>(cameraParam.swingStopAngle);
int ret = device->SetSwingAngle(startAngle, stopAngle);
if (ret == SUCCESS) {
LOG_INFO("Set swing angle %.2f-%.2f for camera %d successfully\n",
cameraParam.swingStartAngle, cameraParam.swingStopAngle, cameraIndex);
} else {
LOG_ERROR("Failed to set swing angle for camera %d, error: %d\n", cameraIndex, ret);
}
}
}
}
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
m_pStatus->OnStatusUpdate(QString("%1参数已更新").arg(cameraName).toStdString());
}
}
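// Called when algorithm parameters change in the configuration; reloads them from disk.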
void GrabBagPresenter::OnAlgorithmParamChanged(const VrAlgorithmParams& algorithmParams)
{
LOG_INFO("Algorithm parameters changed, updating internal configuration\n");
// Update the algorithm parameters by reinitializing them
// (alternatively, the internal state could be updated in place)
InitAlgorithmParams();
if (m_pStatus) {
m_pStatus->OnStatusUpdate("算法参数已更新");
}
}
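// Tracks serial connection state changes and recomputes the overall work status.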
void GrabBagPresenter::OnSerialConnectionChanged(bool connected)
{
if (connected) {
LOG_INFO("Serial connection established\n");
m_bSerialConnected = true;
} else {
LOG_INFO("Serial connection lost\n");
m_bSerialConnected = false;
}
// Update the work status
CheckAndUpdateWorkStatus();
}
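// Handles a start/stop work signal received over the serial link; mirrors OnRobotWorkSignal.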
bool GrabBagPresenter::OnSerialWorkSignal(bool startWork, int cameraIndex)
{
LOG_INFO("Serial work signal received: %s, camera index: %d\n", startWork ? "start" : "stop", cameraIndex);
if(!_SinglePreDetection(cameraIndex)){
return false;
}
int nRet = _SingleDetection(cameraIndex, startWork);
if(nRet != SUCCESS){
m_currentWorkStatus = WorkStatus::Ready;
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
}
return nRet == SUCCESS;
}
// Set the default camera index
void GrabBagPresenter::SetDefaultCameraIndex(int cameraIndex)
{
LOG_INFO("Setting default camera index from %d to %d\n", m_currentCameraIndex, cameraIndex);
// Validate the camera index (cameraIndex is the 1-based index from the configuration)
if (cameraIndex < 1 || cameraIndex > static_cast<int>(m_vrEyeDeviceList.size())) {
LOG_WARNING("Invalid camera index: %d, valid range: 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
if (m_pStatus) {
m_pStatus->OnStatusUpdate(QString("无效的相机索引: %1有效范围: 1-%2").arg(cameraIndex).arg(m_vrEyeDeviceList.size()).toStdString());
}
return;
}
// Update the default camera index
m_currentCameraIndex = cameraIndex;
LOG_INFO("Default camera index updated to %d\n", m_currentCameraIndex);
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
m_pStatus->OnStatusUpdate(QString("设置%1为默认相机").arg(cameraName).toStdString());
}
}
// Save the detection data to a file (default implementation)
int GrabBagPresenter::SaveDetectionDataToFile(const std::string& filePath)
{
LOG_INFO("Saving detection data to file: %s\n", filePath.c_str());
// Guard the cache while reading it for saving
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
if (m_detectionDataCache.empty()) {
LOG_WARNING("No detection data available for saving\n");
return ERR_CODE(DEV_DATA_INVALID);
}
// Save the data to the file
int lineNum = static_cast<int>(m_detectionDataCache.size());
float scanSpeed = 0.0f;
int maxTimeStamp = 0;
int clockPerSecond = 0;
int result = m_dataLoader.SaveLaserScanData(filePath, m_detectionDataCache, lineNum, scanSpeed, maxTimeStamp, clockPerSecond);
if (result == SUCCESS) {
LOG_INFO("Successfully saved %d lines of detection data to file: %s\n", lineNum, filePath.c_str());
} else {
LOG_ERROR("Failed to save detection data, error: %s\n", m_dataLoader.GetLastError().c_str());
}
return result;
}