// GrabBag/GrabBagApp/Presenter/Src/GrabBagPresenter.cpp
//
// Commit ae2f795d2c (2025-06-17 00:37:05 +08:00):
//   - Initial pages and workflow
//   - TestDATA additions
//   - Preliminary ARM build (code compiles; OpenCV missing)
//
// Note: this file contains Chinese (non-ASCII) string literals that are shown
// to the user via the status callback; they are intentional.

#include "GrabBagPresenter.h"
#include "VrError.h"
#include "VrLog.h"
#include <QtCore/QCoreApplication>
#include <QtCore/QFileInfo>
#include <QtCore/QDir>
#include <QtCore/QString>
#include <cmath>
#include <algorithm>
#include <QImage>
#include "VrTimeUtils.h"
#include "SG_bagPositioning_Export.h"
// Constructor: puts the presenter into a safe "nothing connected" state.
// All owned pointers start null; devices and the robot protocol are created
// lazily in Init().
// Fix: m_bAlgoDetectThreadRunning is read by the destructor and the worker
// thread but was never initialized here; it now starts as false.
// NOTE(review): if the member carries an in-class initializer in the header,
// this entry is redundant but harmless.
GrabBagPresenter::GrabBagPresenter()
    : m_vrConfig(nullptr)
    , m_pStatus(nullptr)
    , m_pRobotProtocol(nullptr)
    , m_bCameraConnected(false)
    , m_bRobotConnected(false)
    , m_bAlgoDetectThreadRunning(false)
    , m_currentWorkStatus(WorkStatus::Error)
{
}
// Destructor: signals the algorithm-detection thread to stop, frees the cached
// detection data, shuts down the robot protocol service, and closes/deletes
// every owned camera device.
// NOTE(review): the detection thread was started with detach() in Init(), so
// it cannot be joined here; it may still be running (or blocked inside
// m_algoDetectCondition.wait) while this object is destroyed — potential
// use-after-free. Consider keeping the std::thread as a member and joining.
GrabBagPresenter::~GrabBagPresenter()
{
// Ask the worker thread to stop and wake it from its condition wait.
m_bAlgoDetectThreadRunning = false;
m_algoDetectCondition.notify_all();
// Free the per-line point buffers cached by _DetectionCallback.
_ClearDetectionDataCache();
// Shut down and release the robot protocol service.
if (m_pRobotProtocol) {
m_pRobotProtocol->Deinitialize();
delete m_pRobotProtocol;
m_pRobotProtocol = nullptr;
}
// Close and delete every camera device we own.
for(auto it = m_vrEyeDeviceList.begin(); it != m_vrEyeDeviceList.end(); it++)
{
(*it)->CloseDevice();
delete (*it);
}
m_vrEyeDeviceList.clear();
}
int GrabBagPresenter::Init()
{
// 初始化连接状态
m_bCameraConnected = false;
m_bRobotConnected = false;
m_currentWorkStatus = WorkStatus::Error;
// 初始化VrConfig模块
IVrConfig::CreateInstance(&m_vrConfig);
// 获取可执行文件路径
QString exePath = QCoreApplication::applicationFilePath();
// 获取可执行文件所在目录
QString configPath = QFileInfo(exePath).absoluteDir().path() + "/config.xml";
// 读取IP列表
ConfigResult configResult = m_vrConfig->LoadConfig(configPath.toStdString());
// 初始化机械臂协议
int nRet = InitRobotProtocol();
if (nRet != 0) {
m_pStatus->OnStatusUpdate("机械臂服务初始化失败");
m_bRobotConnected = false;
} else {
m_pStatus->OnStatusUpdate("机械臂服务初始化成功");
}
// 初始化算法参数
nRet = InitAlgorithmParams();
if (nRet != 0) {
m_pStatus->OnStatusUpdate("算法参数初始化失败");
LOG_ERROR("Algorithm parameters initialization failed with error: %d\n", nRet);
} else {
m_pStatus->OnStatusUpdate("算法参数初始化成功");
LOG_INFO("Algorithm parameters initialization successful\n");
}
// 通知UI相机个数
int cameraCount = configResult.cameraList.size();
m_pStatus->OnCameraCountChanged(cameraCount);
if(cameraCount > 0){
if (cameraCount >= 1) {
// 尝试打开相机1
// 初始化VrEyeDevice模块
IVrEyeDevice* pDevice = nullptr;
IVrEyeDevice::CreateObject(&pDevice);
nRet = pDevice->InitDevice();
ERR_CODE_RETURN(nRet);
nRet = pDevice->OpenDevice(configResult.cameraList[0].ip.c_str(), &GrabBagPresenter::_StaticCameraNotify, this);
// 通过回调更新相机1状态
bool camera1Connected = (SUCCESS == nRet);
if(camera1Connected)
{
m_vrEyeDeviceList.push_back(pDevice);
}
m_pStatus->OnCamera1StatusChanged(camera1Connected);
m_pStatus->OnStatusUpdate(camera1Connected ? "相机一连接成功" : "相机一连接失败");
m_bCameraConnected = camera1Connected;
}
else {
m_pStatus->OnCamera1StatusChanged(false);
m_bCameraConnected = false;
}
ERR_CODE_RETURN(nRet);
if (cameraCount >= 2) {
IVrEyeDevice* pDevice = nullptr;
IVrEyeDevice::CreateObject(&pDevice);
nRet = pDevice->InitDevice();
ERR_CODE_RETURN(nRet);
// 尝试打开相机2
nRet = pDevice->OpenDevice(configResult.cameraList[1].ip.c_str(), &GrabBagPresenter::_StaticCameraNotify, this);
// 通过回调更新相机2状态
bool camera2Connected = (SUCCESS == nRet);
if(camera2Connected)
{
m_vrEyeDeviceList.push_back(pDevice);
}
m_pStatus->OnCamera2StatusChanged(camera2Connected);
m_pStatus->OnStatusUpdate(camera2Connected ? "相机二连接成功" : "相机二连接失败");
// 只要有一个相机连接成功就认为相机连接正常
if (camera2Connected) {
m_bCameraConnected = true;
}
}
else {
// 如果只有一个相机则将相机2状态设为未连接
m_pStatus->OnCamera2StatusChanged(false);
}
ERR_CODE_RETURN(nRet);
} else {
IVrEyeDevice* pDevice = nullptr;
IVrEyeDevice::CreateObject(&pDevice);
nRet = pDevice->InitDevice();
ERR_CODE_RETURN(nRet);
// 尝试打开相机1
nRet = pDevice->OpenDevice(nullptr, &GrabBagPresenter::_StaticCameraNotify, this);
// 通过回调更新相机1状态
bool camera1Connected = (SUCCESS == nRet);
if(camera1Connected)
{
m_vrEyeDeviceList.push_back(pDevice);
}
m_pStatus->OnCamera1StatusChanged(camera1Connected);
m_pStatus->OnStatusUpdate(camera1Connected ? "相机一连接成功" : "相机一连接失败");
m_bCameraConnected = camera1Connected;
}
m_bAlgoDetectThreadRunning = true;
std::thread algoDetectThread(&GrabBagPresenter::_AlgoDetectThread, this);
algoDetectThread.detach();
m_pStatus->OnStatusUpdate("设备初始化完成");
return SUCCESS;
}
// 初始化机械臂协议
int GrabBagPresenter::InitRobotProtocol()
{
LOG_DEBUG("Start initializing robot protocol\n");
// 创建RobotProtocol实例
if(nullptr == m_pRobotProtocol){
m_pRobotProtocol = new RobotProtocol();
}
// 设置连接状态回调
m_pRobotProtocol->SetConnectionCallback([this](bool connected) {
this->OnRobotConnectionChanged(connected);
});
// 设置工作信号回调
m_pRobotProtocol->SetWorkSignalCallback([this](bool startWork, int cameraIndex) {
this->OnRobotWorkSignal(startWork, cameraIndex);
});
// 初始化协议服务使用端口503避免与原有ModbusTCP冲突
int nRet = m_pRobotProtocol->Initialize();
ERR_CODE_RETURN(nRet);
LOG_INFO("Robot protocol initialization successful\n");
return SUCCESS;
}
// 初始化算法参数
int GrabBagPresenter::InitAlgorithmParams()
{
LOG_DEBUG("Start initializing algorithm parameters\n");
// 获取可执行文件路径
QString exePath = QCoreApplication::applicationFilePath();
QString configPath = QFileInfo(exePath).absoluteDir().path() + "/config.xml";
// 读取配置文件
ConfigResult configResult = m_vrConfig->LoadConfig(configPath.toStdString());
const VrAlgorithmParams& xmlParams = configResult.algorithmParams;
// 初始化算法参数结构
memset(&m_algoParam, 0, sizeof(SG_bagPositionParam));
// 设置编织袋参数
m_algoParam.bagParam.bagL = xmlParams.bagParam.bagL;
m_algoParam.bagParam.bagW = xmlParams.bagParam.bagW;
m_algoParam.bagParam.bagH = xmlParams.bagParam.bagH;
// 设置滤波参数
m_algoParam.filterParam.continuityTh = xmlParams.filterParam.continuityTh;
m_algoParam.filterParam.outlierTh = xmlParams.filterParam.outlierTh;
#if BAG_ALGO_USE_CORNER_FEATURE
// 设置角点特征参数
m_algoParam.cornerParam.cornerTh = xmlParams.cornerParam.cornerTh;
m_algoParam.cornerParam.scale = xmlParams.cornerParam.scale;
m_algoParam.cornerParam.minEndingGap = xmlParams.cornerParam.minEndingGap;
m_algoParam.cornerParam.jumpCornerTh_1 = xmlParams.cornerParam.jumpCornerTh_1;
m_algoParam.cornerParam.jumpCornerTh_2 = xmlParams.cornerParam.jumpCornerTh_2;
#else
// 设置斜率参数
m_algoParam.slopeParam.LSlopeZWin = xmlParams.slopeParam.LSlopeZWin;
m_algoParam.slopeParam.validSlopeH = xmlParams.slopeParam.validSlopeH;
m_algoParam.slopeParam.minLJumpH = xmlParams.slopeParam.minLJumpH;
m_algoParam.slopeParam.minEndingGap = xmlParams.slopeParam.minEndingGap;
// 设置山谷参数
m_algoParam.valleyPara.valleyMinH = xmlParams.valleyParam.valleyMinH;
m_algoParam.valleyPara.valleyMaxW = xmlParams.valleyParam.valleyMaxW;
#endif
// 设置增长参数
m_algoParam.growParam.maxLineSkipNum = xmlParams.growParam.maxLineSkipNum;
m_algoParam.growParam.yDeviation_max = xmlParams.growParam.yDeviation_max;
m_algoParam.growParam.maxSkipDistance = xmlParams.growParam.maxSkipDistance;
m_algoParam.growParam.zDeviation_max = xmlParams.growParam.zDeviation_max;
m_algoParam.growParam.minLTypeTreeLen = xmlParams.growParam.minLTypeTreeLen;
m_algoParam.growParam.minVTypeTreeLen = xmlParams.growParam.minVTypeTreeLen;
// 初始化平面校准参数(单位矩阵,表示不进行额外的平面校准)
double initCalib[9] = {
1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0
};
for (int i = 0; i < 9; i++) {
m_planeCalibParam.planeCalib[i] = initCalib[i];
m_planeCalibParam.invRMatrix[i] = initCalib[i];
}
m_planeCalibParam.planeHeight = xmlParams.planeCalibParam.planeHeight;
LOG_INFO("Algorithm parameters initialized successfully:\n");
LOG_INFO(" Bag: L=%d, W=%d, H=%d\n", m_algoParam.bagParam.bagL, m_algoParam.bagParam.bagW, m_algoParam.bagParam.bagH);
LOG_INFO(" Filter: continuityTh=%.1f, outlierTh=%d\n", m_algoParam.filterParam.continuityTh, m_algoParam.filterParam.outlierTh);
LOG_INFO(" Plane calibration: height=%.1f\n", m_planeCalibParam.planeHeight);
return SUCCESS;
}
// 机械臂协议回调函数实现
void GrabBagPresenter::OnRobotConnectionChanged(bool connected)
{
LOG_INFO("Robot connection status changed: %s\n", (connected ? "connected" : "disconnected"));
// 更新机械臂连接状态
m_bRobotConnected = connected;
if (m_pStatus) {
m_pStatus->OnRobotConnectionChanged(connected);
m_pStatus->OnStatusUpdate(connected ? "机械臂已连接" : "机械臂连接断开");
}
// 检查并更新工作状态
CheckAndUpdateWorkStatus();
}
void GrabBagPresenter::OnRobotWorkSignal(bool startWork, int cameraIndex)
{
LOG_INFO("Received robot work signal: %s for camera index: %d\n", (startWork ? "start work" : "stop work"), cameraIndex);
if (nullptr == m_pStatus) {
return;
}
if (startWork) {
QString message;
if (cameraIndex == -1) {
message = "收到开始工作信号,启动所有相机检测";
} else {
message = QString("收到开始工作信号,启动相机 %1 检测").arg(cameraIndex + 1);
}
m_pStatus->OnStatusUpdate(message.toStdString());
StartDetection(cameraIndex);
} else {
QString message;
if (cameraIndex == -1) {
message = "收到停止工作信号,停止所有相机检测";
} else {
message = QString("收到停止工作信号,停止相机 %1 检测").arg(cameraIndex + 1);
}
m_pStatus->OnStatusUpdate(message.toStdString());
StopDetection();
}
}
// Registers the UI status sink used for all subsequent status/result
// notifications. The presenter stores but does not own the pointer.
void GrabBagPresenter::SetStatusCallback(IYGrabBagStatus* status)
{
    m_pStatus = status;
}
// 模拟检测函数,用于演示
int GrabBagPresenter::StartDetection(int cameraIndex)
{
LOG_INFO("***** Start detection with camera index: %d\n", cameraIndex);
// 检查设备状态是否准备就绪
if (m_currentWorkStatus != WorkStatus::Ready) {
LOG_INFO("Device not ready, cannot start detection\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("设备未准备就绪,无法开始检测");
}
return ERR_CODE(DEV_BUSY);
}
// 通知UI工作状态变更为"正在工作"
if (m_pStatus) {
m_currentWorkStatus = WorkStatus::Working;
m_pStatus->OnWorkStatusChanged(WorkStatus::Working);
}
if(m_vrEyeDeviceList.empty())
{
LOG_ERROR("No camera device found\n");
return ERR_CODE(DEV_NOT_FIND);
}
// 清空检测数据缓存(释放之前的内存)
_ClearDetectionDataCache();
int nRet = SUCCESS;
// 根据参数决定启动哪些相机
// 启动指定相机
if (cameraIndex >= 0 && cameraIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
nRet = m_vrEyeDeviceList[cameraIndex]->StartDetect(&GrabBagPresenter::_StaticDetectionCallback, this);
if (nRet == SUCCESS) {
LOG_INFO("Camera %d start detection success\n", cameraIndex);
m_pStatus->OnStatusUpdate(QString("启动相机 %1 检测成功").arg(cameraIndex + 1).toStdString());
} else {
LOG_ERROR("Camera %d start detection failed with error: %d\n", cameraIndex, nRet);
m_pStatus->OnStatusUpdate(QString("启动相机 %1 检测失败").arg(cameraIndex + 1).toStdString());
}
} else {
LOG_ERROR("Invalid camera index: %d, available cameras: %zu\n", cameraIndex, m_vrEyeDeviceList.size());
m_pStatus->OnStatusUpdate(QString("无效的相机索引: %1可用相机数量: %2").arg(cameraIndex).arg(m_vrEyeDeviceList.size()).toStdString());
nRet = ERR_CODE(DEV_NOT_FIND);
}
return nRet;
}
int GrabBagPresenter::StopDetection()
{
LOG_INFO("Stop detection\n");
// 停止所有相机的检测
for (size_t i = 0; i < m_vrEyeDeviceList.size(); ++i) {
IVrEyeDevice* pDevice = m_vrEyeDeviceList[i];
if (pDevice) {
int ret = pDevice->StopDetect();
if (ret == 0) {
LOG_INFO("Camera %zu stop detection successfully\n", i + 1);
} else {
LOG_WARNING("Camera %zu stop detection failed, error code: %d\n", i + 1, ret);
}
}
}
// 通知UI工作状态变更为"就绪"(如果设备连接正常)
if (m_pStatus) {
// 检查设备连接状态,决定停止后的状态
if (m_bCameraConnected && m_bRobotConnected) {
m_currentWorkStatus = WorkStatus::Ready;
m_pStatus->OnWorkStatusChanged(WorkStatus::Ready);
} else {
m_currentWorkStatus = WorkStatus::Error;
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
m_pStatus->OnStatusUpdate("检测已停止");
}
return 0;
}
// 加载调试数据进行检测
int GrabBagPresenter::LoadDebugDataAndDetect(const std::string& filePath)
{
LOG_INFO("Loading debug data from file: %s\n", filePath.c_str());
if (m_pStatus) {
m_pStatus->OnStatusUpdate("正在加载调试数据...");
}
int lineNum = 0;
float scanSpeed = 0.0f;
int maxTimeStamp = 0;
int clockPerSecond = 0;
int result = SUCCESS;
// 清空现有的检测数据缓存
_ClearDetectionDataCache();
{
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
result = m_dataLoader.LoadLaserScanData(filePath, m_detectionDataCache, lineNum, scanSpeed, maxTimeStamp, clockPerSecond);
}
if (result != SUCCESS) {
LOG_ERROR("Failed to load debug data: %s\n", m_dataLoader.GetLastError().c_str());
if (m_pStatus) {
m_pStatus->OnStatusUpdate("调试数据加载失败: " + m_dataLoader.GetLastError());
}
return result;
}
LOG_INFO("Successfully loaded %d lines of debug data\n", lineNum);
if (m_pStatus) {
m_pStatus->OnStatusUpdate(QString("成功加载 %1 行调试数据").arg(lineNum).toStdString());
m_currentWorkStatus = WorkStatus::Working;
m_pStatus->OnWorkStatusChanged(WorkStatus::Working);
}
// 等待检测完成(这里可以考虑异步处理)
// 为了简化,这里直接调用检测任务
result = _DetectTask();
// 释放加载的数据
m_dataLoader.FreeLaserScanData(m_detectionDataCache);
return result;
}
// Bounds-checked access to the opened camera list (0-based index).
// Returns the device pointer, or nullptr for any out-of-range index.
// Fix: the old `size() <= index` comparison mixed signed/unsigned — a
// negative index only returned nullptr by accident of unsigned wrap-around.
// The check is now explicit and warning-free.
IVrEyeDevice *GrabBagPresenter::GetEyeDevice(int index)
{
    if (index < 0 || static_cast<size_t>(index) >= m_vrEyeDeviceList.size()) {
        return nullptr;
    }
    return m_vrEyeDeviceList[index];
}
// 静态回调函数实现
void GrabBagPresenter::_StaticCameraNotify(EVzDeviceWorkStatus eStatus, void* pExtData, unsigned int nDataLength, void* pInfoParam)
{
// 从pInfoParam获取this指针转换回GrabBagPresenter*类型
GrabBagPresenter* pThis = reinterpret_cast<GrabBagPresenter*>(pInfoParam);
if (pThis)
{
// 调用实例的非静态成员函数
pThis->_CameraNotify(eStatus, pExtData, nDataLength, pInfoParam);
}
}
// Instance-side camera status handler. Tracks offline/reconnect events in the
// connection flags and the UI, and wakes the detection thread when a scan
// (swing) pass finishes.
void GrabBagPresenter::_CameraNotify(EVzDeviceWorkStatus eStatus, void *pExtData, unsigned int nDataLength, void *pInfoParam)
{
    LOG_DEBUG("----- Camera notify received: status=%d\n", (int)eStatus);
    if (eStatus == EVzDeviceWorkStatus::keDeviceWorkStatus_Offline) {
        LOG_WARNING("Camera device offline/disconnected\n");
        // A camera dropped off the network.
        m_bCameraConnected = false;
        // NOTE(review): which camera went offline is not distinguishable
        // here; camera 1's UI state is updated regardless of the device.
        if (m_pStatus) {
            m_pStatus->OnCamera1StatusChanged(false);
            m_pStatus->OnStatusUpdate("相机设备离线");
        }
        CheckAndUpdateWorkStatus();
    } else if (eStatus == EVzDeviceWorkStatus::keDeviceWorkStatus_Eye_Reconnect) {
        LOG_INFO("Camera device online/connected\n");
        // A camera came back online.
        m_bCameraConnected = true;
        if (m_pStatus) {
            m_pStatus->OnCamera1StatusChanged(true);
            m_pStatus->OnStatusUpdate("相机设备已连接");
        }
        CheckAndUpdateWorkStatus();
    } else if (eStatus == EVzDeviceWorkStatus::keDeviceWorkStatus_Device_Swing_Finish) {
        LOG_INFO("Received scan finish signal from camera\n");
        // One scan pass completed: wake _AlgoDetectThread to run detection.
        m_algoDetectCondition.notify_one();
    } else {
        LOG_DEBUG("Unhandled camera status: %d\n", (int)eStatus);
    }
}
// 检测数据回调函数静态版本
void GrabBagPresenter::_StaticDetectionCallback(EVzResultDataType eDataType, SVzLaserLineData* pLaserLinePoint, void* pUserData)
{
GrabBagPresenter* pThis = reinterpret_cast<GrabBagPresenter*>(pUserData);
if (pThis && pLaserLinePoint) {
pThis->_DetectionCallback(eDataType, pLaserLinePoint);
}
}
// 检测数据回调函数实例版本
void GrabBagPresenter::_DetectionCallback(EVzResultDataType eDataType, SVzLaserLineData* pLaserLinePoint)
{
if (!pLaserLinePoint) {
return;
}
// 转换数据格式从SVzLaserLineData转换为SVzNL3DLaserLine并存储到缓存
SVzNL3DLaserLine laser3DLine;
// 复制基本信息
laser3DLine.nTimeStamp = pLaserLinePoint->llTimeStamp;
laser3DLine.nPositionCnt = pLaserLinePoint->nPointCount;
// 分配和复制点云数据
laser3DLine.p3DPosition = new SVzNL3DPosition[pLaserLinePoint->nPointCount];
// 复制点云数据
memcpy(laser3DLine.p3DPosition, pLaserLinePoint->p3DPoint, sizeof(SVzNL3DPosition) * pLaserLinePoint->nPointCount);
// 将转换后的数据保存到缓存中
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
m_detectionDataCache.push_back(laser3DLine);
}
void GrabBagPresenter::CheckAndUpdateWorkStatus()
{
if (m_bCameraConnected && m_bRobotConnected) {
m_currentWorkStatus = WorkStatus::Ready;
m_pStatus->OnWorkStatusChanged(WorkStatus::Ready);
m_pStatus->OnStatusUpdate("设备已准备好");
} else {
m_currentWorkStatus = WorkStatus::Error;
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
m_pStatus->OnStatusUpdate("设备未准备好");
}
}
void GrabBagPresenter::_AlgoDetectThread()
{
while(m_bAlgoDetectThreadRunning)
{
std::unique_lock<std::mutex> lock(m_algoDetectMutex);
m_algoDetectCondition.wait(lock, [this]() {
return m_currentWorkStatus == WorkStatus::Working;
});
// 检查设备状态是否准备就绪
_DetectTask();
}
}
// Runs one full detection pass on the cached laser lines: levels the data,
// calls the bag-positioning algorithm, converts the hits to the UI result
// format, notifies the UI and forwards the best target to the robot.
// Returns SUCCESS, or DEV_DATA_INVALID when the cache is empty.
// NOTE(review): m_detectionDataCache is read and mutated here WITHOUT holding
// m_detectionDataMutex while _DetectionCallback may still be appending from
// the camera thread — confirm detection has stopped before this runs, or
// take the lock.
// NOTE(review): the status is set to Completed (and notified) and then
// immediately overwritten to Ready without a second notification — the UI
// never observes Ready here; verify this is intended.
int GrabBagPresenter::_DetectTask()
{
LOG_INFO("----- Start real detection task using algorithm\n");
// 1. Require cached point-cloud data.
if (m_detectionDataCache.empty()) {
LOG_WARNING("No cached detection data available\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("无缓存的检测数据");
}
return ERR_CODE(DEV_DATA_INVALID);
}
// 2. Tell the UI the algorithm is running.
if(m_pStatus){
m_pStatus->OnStatusUpdate("正在进行算法检测...");
}
// 3. Algorithm parameters were loaded from XML in InitAlgorithmParams().
LOG_INFO("Using algorithm parameters from XML configuration\n");
LOG_INFO(" Bag: L=%d, W=%d, H=%d\n", m_algoParam.bagParam.bagL, m_algoParam.bagParam.bagW, m_algoParam.bagParam.bagH);
LOG_INFO(" Filter: continuityTh=%.1f, outlierTh=%d\n", m_algoParam.filterParam.continuityTh, m_algoParam.filterParam.outlierTh);
CVrTimeUtils oTimeUtils;
// 4. Pre-process each line in place: level the data against the calibrated
// plane and drop the ground (uses the stored plane-calibration matrix).
for (size_t i = 0; i < m_detectionDataCache.size(); i++) {
sg_lineDataR(&m_detectionDataCache[i], m_planeCalibParam.planeCalib, m_planeCalibParam.planeHeight);
}
LOG_DEBUG("sg_lineDataR time: %.2f ms\n", oTimeUtils.GetElapsedTimeInMilliSec());
// 5. Run the bag-positioning algorithm over the whole cache.
oTimeUtils.Update();
std::vector<SSG_peakRgnInfo> objOps;
sg_getBagPosition(static_cast<SVzNL3DLaserLine*>(m_detectionDataCache.data()),
m_detectionDataCache.size(), m_algoParam, m_planeCalibParam, objOps);
LOG_INFO("sg_getBagPosition detected %zu objects time : %.2f ms\n", objOps.size(), oTimeUtils.GetElapsedTimeInMilliSec());
// 6. Convert the algorithm output into the UI result structure.
DetectionResult detectionResult;
// Render the (now leveled) point cloud plus detections as a preview image.
detectionResult.image = _GeneratePointCloudImage(static_cast<SVzNL3DLaserLine*>(m_detectionDataCache.data()),
m_detectionDataCache.size(), objOps);
// Copy each detected object's pose; all objects go into a single layer
// (layerIndex 1) for now.
for (size_t i = 0; i < objOps.size(); i++) {
const SSG_peakRgnInfo& obj = objOps[i];
// Lazily create the single result layer.
if (detectionResult.positionLayout.empty()) {
GrabBagPositionLayout layer;
layer.layerIndex = 1;
detectionResult.positionLayout.push_back(layer);
}
// Per-object grab pose (position + orientation).
GrabBagPosition pos;
pos.x = obj.centerPos.x;
pos.y = obj.centerPos.y;
pos.z = obj.centerPos.z;
pos.roll = obj.centerPos.x_roll;
pos.pitch = obj.centerPos.y_pitch;
pos.yaw = obj.centerPos.z_yaw;
detectionResult.positionLayout[0].position.push_back(pos);
LOG_INFO("Object %zu: X=%.2f, Y=%.2f, Z=%.2f, Roll=%.2f, Pitch=%.2f, Yaw=%.2f\n",
i, pos.x, pos.y, pos.z, pos.roll, pos.pitch, pos.yaw);
}
// 8. Publish the results (note: m_pStatus is not null-checked on this path —
// see CheckAndUpdateWorkStatus for the registered-callback assumption).
if (objOps.size() > 0) {
// Push the full result set to the UI.
m_pStatus->OnDetectionResult(detectionResult);
QString statusMsg = QString("检测完成,发现%1个目标").arg(objOps.size());
m_pStatus->OnStatusUpdate(statusMsg.toStdString());
// Forward the best (first) target to the robot.
_SendDetectionResultToRobot(objOps);
} else {
// Nothing found this pass.
m_pStatus->OnStatusUpdate("检测完成,未发现目标");
LOG_WARNING("No objects detected\n");
}
// 9. Report Completed to the UI...
if (m_pStatus) {
m_currentWorkStatus = WorkStatus::Completed;
m_pStatus->OnWorkStatusChanged(WorkStatus::Completed);
}
// ...then fall back to Ready internally (no UI notification — see note).
m_currentWorkStatus = WorkStatus::Ready;
return SUCCESS;
}
// 释放缓存的检测数据
void GrabBagPresenter::_ClearDetectionDataCache()
{
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
LOG_DEBUG("Clearing detection data cache, current size: %zu\n", m_detectionDataCache.size());
// 释放缓存的内存
for (auto& cachedLine : m_detectionDataCache) {
if (cachedLine.p3DPosition) {
delete[] cachedLine.p3DPosition;
cachedLine.p3DPosition = nullptr;
}
}
// 清空缓存容器
m_detectionDataCache.clear();
LOG_DEBUG("Detection data cache cleared successfully\n");
}
// 发送检测结果给机械臂
void GrabBagPresenter::_SendDetectionResultToRobot(const std::vector<SSG_peakRgnInfo>& objOps)
{
if (!m_pRobotProtocol) {
LOG_WARNING("Robot protocol not initialized, cannot send detection result\n");
return;
}
if (objOps.empty()) {
LOG_INFO("No objects detected, sending empty result to robot\n");
// 可以选择发送一个特殊的"无目标"坐标给机械臂
RobotProtocol::RobotCoordinate emptyCoord;
memset(&emptyCoord, 0, sizeof(RobotProtocol::RobotCoordinate));
emptyCoord.x = -9999.0; // 使用特殊值表示无目标
emptyCoord.y = -9999.0;
emptyCoord.z = -9999.0;
m_pRobotProtocol->SetRobotCoordinate(emptyCoord);
return;
}
// 选择第一个检测到的目标作为抓取目标
const SSG_peakRgnInfo& targetObj = objOps[0];
LOG_INFO("Processing detection result for robot: %zu objects detected\n", objOps.size());
LOG_INFO("Selected target object: X=%.2f, Y=%.2f, Z=%.2f, Roll=%.2f, Pitch=%.2f, Yaw=%.2f\n",
targetObj.centerPos.x, targetObj.centerPos.y, targetObj.centerPos.z,
targetObj.centerPos.x_roll, targetObj.centerPos.y_pitch, targetObj.centerPos.z_yaw);
// 坐标转换:从算法坐标系转换到机械臂坐标系
RobotProtocol::RobotCoordinate robotCoord;
// 基本坐标转换(根据实际的坐标系关系进行调整)
// 这里假设需要进行一些坐标系转换,具体转换关系需要根据实际标定结果确定
// 位置坐标转换 (单位转换mm -> mm坐标系转换等)
robotCoord.x = targetObj.centerPos.x; // X轴坐标
robotCoord.y = targetObj.centerPos.y; // Y轴坐标
robotCoord.z = targetObj.centerPos.z; // Z轴坐标
// 姿态角度转换 (弧度转角度,坐标系转换等)
robotCoord.rx = targetObj.centerPos.x_roll * 180.0 / M_PI; // Roll角 (弧度转角度)
robotCoord.ry = targetObj.centerPos.y_pitch * 180.0 / M_PI; // Pitch角 (弧度转角度)
robotCoord.rz = targetObj.centerPos.z_yaw * 180.0 / M_PI; // Yaw角 (弧度转角度)
// 应用坐标系偏移和旋转变换(如果需要)
// 这部分可以根据实际的手眼标定结果进行调整
// 例如:
// - 相机坐标系到机械臂基坐标系的变换
// - 工具坐标系的偏移
// - 抓取点的偏移调整
// 示例:添加一些实际应用中可能需要的偏移
robotCoord.z += 50.0; // Z轴向上偏移50mm避免碰撞
// 工作空间检查
// 检查目标位置是否在机械臂的工作空间内
double distanceFromOrigin = sqrt(robotCoord.x * robotCoord.x +
robotCoord.y * robotCoord.y +
robotCoord.z * robotCoord.z);
const double MAX_REACH = 1500.0; // 机械臂最大工作半径 (mm)
const double MIN_REACH = 200.0; // 机械臂最小工作半径 (mm)
if (distanceFromOrigin > MAX_REACH || distanceFromOrigin < MIN_REACH) {
LOG_WARNING("Target position out of robot workspace: distance=%.2f mm (valid range: %.2f-%.2f mm)\n",
distanceFromOrigin, MIN_REACH, MAX_REACH);
// 可以选择调整坐标到工作空间内,或者发送错误状态
if (distanceFromOrigin > MAX_REACH) {
// 按比例缩放到最大工作半径
double scale = MAX_REACH / distanceFromOrigin;
robotCoord.x *= scale;
robotCoord.y *= scale;
robotCoord.z *= scale;
LOG_INFO("Scaled target position to workspace boundary\n");
}
}
// 发送坐标给机械臂
int result = m_pRobotProtocol->SetRobotCoordinate(robotCoord);
if (result == 0) {
LOG_INFO("Successfully sent robot coordinates: X=%.2f, Y=%.2f, Z=%.2f, RX=%.2f, RY=%.2f, RZ=%.2f\n",
robotCoord.x, robotCoord.y, robotCoord.z,
robotCoord.rx, robotCoord.ry, robotCoord.rz);
// 更新状态信息
if (m_pStatus) {
QString statusMsg = QString("已发送目标坐标给机械臂: X=%.1f, Y=%.1f, Z=%.1f").arg(robotCoord.x).arg(robotCoord.y).arg(robotCoord.z);
m_pStatus->OnStatusUpdate(statusMsg.toStdString());
}
} else {
LOG_ERROR("Failed to send robot coordinates, error code: %d\n", result);
if (m_pStatus) {
m_pStatus->OnStatusUpdate("发送坐标给机械臂失败");
}
}
// 记录多个目标的信息(用于调试和分析)
if (objOps.size() > 1) {
LOG_INFO("Additional detected objects (%zu total):\n", objOps.size());
for (size_t i = 1; i < objOps.size() && i < 5; i++) { // 最多记录5个目标
const SSG_peakRgnInfo& obj = objOps[i];
LOG_INFO(" Object %zu: X=%.2f, Y=%.2f, Z=%.2f\n",i, obj.centerPos.x, obj.centerPos.y, obj.centerPos.z);
}
}
}
// 点云转图像 - 简化版本
QImage GrabBagPresenter::_GeneratePointCloudImage(SVzNL3DLaserLine* scanData, int lineNum,
const std::vector<SSG_peakRgnInfo>& objOps)
{
if (!scanData || lineNum <= 0) {
return QImage(); // 返回空图像
}
// 统计X和Y的范围
SVzNLRangeD x_range = {0, -1};
SVzNLRangeD y_range = {0, -1};
for (int line = 0; line < lineNum; line++) {
for (int i = 0; i < scanData[line].nPositionCnt; i++) {
SVzNL3DPosition* pt3D = &scanData[line].p3DPosition[i];
if (pt3D->pt3D.z < 1e-4) continue;
if (x_range.max < x_range.min) {
x_range.min = x_range.max = pt3D->pt3D.x;
} else {
if (x_range.min > pt3D->pt3D.x) x_range.min = pt3D->pt3D.x;
if (x_range.max < pt3D->pt3D.x) x_range.max = pt3D->pt3D.x;
}
if (y_range.max < y_range.min) {
y_range.min = y_range.max = pt3D->pt3D.y;
} else {
if (y_range.min > pt3D->pt3D.y) y_range.min = pt3D->pt3D.y;
if (y_range.max < pt3D->pt3D.y) y_range.max = pt3D->pt3D.y;
}
}
}
// 创建图像
int imgCols = 800;
int imgRows = 600;
double x_cols = 768.0;
double y_rows = 568.0;
int x_skip = 16;
int y_skip = 16;
// 计算投影比例
double x_scale = (x_range.max - x_range.min) / x_cols;
double y_scale = (y_range.max - y_range.min) / y_rows;
QImage image(imgCols, imgRows, QImage::Format_RGB888);
image.fill(Qt::black);
QPainter painter(&image);
// 定义颜色
QColor objColors[8] = {
QColor(245,222,179), QColor(210,105,30), QColor(240,230,140), QColor(135,206,235),
QColor(250,235,215), QColor(189,252,201), QColor(221,160,221), QColor(188,143,143)
};
// 绘制点云
for (int line = 0; line < lineNum; line++) {
for (int i = 0; i < scanData[line].nPositionCnt; i++) {
SVzNL3DPosition* pt3D = &scanData[line].p3DPosition[i];
if (pt3D->pt3D.z < 1e-4) continue;
int objId = (pt3D->nPointIdx >> 16) & 0xff;
QColor pointColor = (objId > 0) ? objColors[objId % 8] : QColor(60, 60, 60);
int px = (int)((pt3D->pt3D.x - x_range.min) / x_scale + x_skip);
int py = (int)((pt3D->pt3D.y - y_range.min) / y_scale + y_skip);
if (px >= 0 && px < imgCols && py >= 0 && py < imgRows) {
painter.setPen(QPen(pointColor, 1));
painter.drawPoint(px, py);
}
}
}
// 绘制检测目标
for (size_t i = 0; i < objOps.size(); i++) {
QColor objColor = (i == 0) ? QColor(255, 0, 0) : QColor(255, 255, 0);
int size = (i == 0) ? 12 : 8;
int px = (int)((objOps[i].centerPos.x - x_range.min) / x_scale + x_skip);
int py = (int)((objOps[i].centerPos.y - y_range.min) / y_scale + y_skip);
if (px >= 0 && px < imgCols && py >= 0 && py < imgRows) {
painter.setPen(QPen(objColor, 2));
painter.setBrush(QBrush(objColor));
painter.drawEllipse(px - size/2, py - size/2, size, size);
}
}
return image;
}