/**
 * Binarize an 8-bit single-channel image: pixels whose gray value lies in
 * [nMinGray, nMaxGray] (inclusive) become 255, all others become 0.
 *
 * @param pSrcImg  source buffer, nWidth * nHeight bytes
 * @param pDstImg  destination buffer, same size, allocated by the caller
 * @param nWidth   image width in pixels
 * @param nHeight  image height in pixels
 * @param nMinGray inclusive lower gray threshold
 * @param nMaxGray inclusive upper gray threshold
 * @return false on null buffers or non-positive dimensions, true otherwise
 */
bool MYThresHold(const uchar* pSrcImg, uchar* &pDstImg, int nWidth, int nHeight, int nMinGray, int nMaxGray) {
	// The original dereferenced both buffers unconditionally.
	if (pSrcImg == nullptr || pDstImg == nullptr || nWidth <= 0 || nHeight <= 0) {
		return false;
	}
	// Write straight into the caller's buffer; the original allocated a
	// temporary heap buffer and memcpy'd it over, which was pure overhead
	// (and a leak risk had anything thrown in between).
	const int nPixels = nWidth * nHeight;
	for (int i = 0; i < nPixels; ++i) {
		pDstImg[i] = (pSrcImg[i] >= nMinGray && pSrcImg[i] <= nMaxGray) ? 255 : 0;
	}
	return true;
}
/**
 * Detect blob centroids in a BGR image: binarize by gray range, extract
 * contours, keep those whose area lies in [params.nAreaMin, params.nAreaMax],
 * and append each contour's centroid to ptCenter.
 *
 * @param pSrcImg    interleaved BGR pixel buffer, 3 bytes per pixel
 * @param nImgWidth  image width in pixels
 * @param nImgHeight image height in pixels
 * @param params     gray thresholds (nGrayMin/nGrayMax) and area limits
 * @param ptCenter   output: centroids of the accepted contours (appended)
 */
void HOIImageCenterPoint(uchar* pSrcImg, int nImgWidth, int nImgHeight, ImgProcParam params, vector<Point2f> &ptCenter) {
	// cv::Mat's constructor takes (rows, cols) = (height, width). The
	// original passed (width, height), transposing the image for any
	// non-square input.
	Mat SrcMat(nImgHeight, nImgWidth, CV_8UC3);
	// 3 bytes per BGR pixel. The original multiplied by 24 (bits per pixel,
	// not bytes), copying 8x past the end of pSrcImg.
	memcpy(SrcMat.data, pSrcImg, static_cast<size_t>(nImgHeight) * nImgWidth * 3);
	Mat grayMat;
	cvtColor(SrcMat, grayMat, COLOR_BGR2GRAY);
	Mat binMat = cv::Mat::zeros(SrcMat.size(), CV_8UC1);
	uchar* pBinData = binMat.data;
	MYThresHold(grayMat.data, pBinData, SrcMat.cols, SrcMat.rows, params.nGrayMin, params.nGrayMax);
	vector<vector<Point> > contours;
	vector<Vec4i> hierarchy;
	// Modern enum names, consistent with COLOR_BGR2GRAY used above
	// (the legacy CV_* macros are removed in OpenCV 4 without imgproc_c.h).
	findContours(binMat, contours, hierarchy, RETR_LIST, CHAIN_APPROX_NONE);
	for (size_t i = 0; i < contours.size(); ++i) {
		double fArea = contourArea(contours[i]);
		if (fArea < params.nAreaMin || fArea > params.nAreaMax) {
			continue;
		}
		Moments mm = moments(contours[i], false);
		// m00 can be 0 for degenerate contours (when nAreaMin <= 0), which
		// would make the centroid a division by zero.
		if (mm.m00 == 0) {
			continue;
		}
		Point2f pnt = Point2f(mm.m10 / mm.m00, mm.m01 / mm.m00); // centroid
		ptCenter.push_back(pnt);
	}
}
/**
 * Fit an affine transform mapping points_camera -> points_robot and return
 * the residual error: sqrt(mean dx^2) + sqrt(mean dy^2) over all pairs.
 *
 * @param points_camera points in image coordinates
 * @param points_robot  corresponding points in robot coordinates
 * @return residual as above, or 0 if the affine estimation failed
 *         (estimateAffine2D returned a matrix that is not 2x3)
 */
double RMS_Add(vector<Point2f> &points_camera, vector<Point2f> &points_robot)
{
	Mat warpMat = estimateAffine2D(points_camera, points_robot);
	double rms = 0;
	if (warpMat.cols == 3 && warpMat.rows == 2)
	{
		// Affine coefficients: [x'] = [A B C] [x y 1]^T, [y'] = [D E F] [x y 1]^T
		const double A = warpMat.ptr<double>(0)[0];
		const double B = warpMat.ptr<double>(0)[1];
		const double C = warpMat.ptr<double>(0)[2];
		const double D = warpMat.ptr<double>(1)[0];
		const double E = warpMat.ptr<double>(1)[1];
		const double F = warpMat.ptr<double>(1)[2];
		// Use every available pair instead of the hard-coded 9 of the
		// original (identical result for the 9-point calibration case).
		const size_t n = points_camera.size() < points_robot.size()
			? points_camera.size() : points_robot.size();
		if (n == 0) {
			return 0;
		}
		double sumX = 0, sumY = 0;
		for (size_t i = 0; i < n; i++)
		{
			// Map the camera point into robot space via the affine fit.
			const double px = A * points_camera[i].x + B * points_camera[i].y + C;
			const double py = D * points_camera[i].x + E * points_camera[i].y + F;
			const double dx = points_robot[i].x - px;
			const double dy = points_robot[i].y - py;
			sumX += dx * dx; // x*x instead of pow(x, 2) — same value, no libm call
			sumY += dy * dy;
		}
		rms = sqrt(sumX / n) + sqrt(sumY / n);
	}
	return rms;
}
/**
 * Nine-point hand-eye refinement. Starting from the plain affine fit's
 * residual, run a coordinate descent on the robot points (step 0.01,
 * 3 sweeps over x then y of every point) to reduce the RMS residual, then
 * re-fit and return the final 2x3 affine matrix.
 *
 * NOTE: points_robot is modified in place by the descent.
 *
 * @param points_camera points in image coordinates
 * @param points_robot  corresponding robot coordinates (adjusted in place)
 * @return the refined affine matrix from estimateAffine2D (2x3, CV_64F),
 *         or an empty Mat if the final estimation fails
 */
Mat RMS(vector<Point2f> &points_camera, vector<Point2f> &points_robot)
{
	// Residual of the unperturbed correspondence set. The original
	// duplicated the whole RMS_Add body inline here; RMS_Add computes
	// exactly the same value (including returning 0 on a failed fit).
	double rms = RMS_Add(points_camera, points_robot);
	// Hill-descent on a single coordinate: step +0.01 while the residual
	// keeps dropping; otherwise try the -0.01 direction; in either case
	// undo the final overshooting step. The original had this block
	// copy-pasted twice (once for x, once for y).
	auto descend = [&](float &coord) {
		coord += 0.01;
		double curr = RMS_Add(points_camera, points_robot);
		if (curr < rms)
		{
			while (curr < rms)
			{
				coord += 0.01;
				rms = curr;
				curr = RMS_Add(points_camera, points_robot);
			}
			coord -= 0.01; // step back over the overshoot
		}
		else
		{
			coord -= 0.02; // undo the +0.01 probe and probe -0.01
			curr = RMS_Add(points_camera, points_robot);
			while (curr < rms)
			{
				coord -= 0.01;
				rms = curr;
				curr = RMS_Add(points_camera, points_robot);
			}
			coord += 0.01; // step back over the overshoot
		}
	};
	// Three sweeps over all points (the original hard-coded 9).
	for (int k = 0; k < 3; k++)
	{
		for (size_t i = 0; i < points_robot.size(); i++)
		{
			descend(points_robot[i].x);
			descend(points_robot[i].y);
		}
	}
	return estimateAffine2D(points_camera, points_robot); // final refined fit
}
/**
 * Run the nine-point hand-eye calibration and store the resulting affine
 * coefficients A..F into the global _stCalibrateParams.
 *
 * Both vectors are deliberately taken by value: RMS() perturbs the robot
 * points in place, and the copies keep the caller's data untouched.
 *
 * @param points_camera at least 9 points in image coordinates
 * @param points_robot  at least 9 corresponding robot coordinates
 * @return false if fewer than 9 pairs were supplied or the affine
 *         estimation failed; true on success
 */
bool HandEyeCalibrate(vector<Point2f>points_camera, vector<Point2f>points_robot) {
	if (points_camera.size() < 9 || points_robot.size() < 9) {
		return false;
	}
	Mat warpMat = RMS(points_camera, points_robot);
	// estimateAffine2D can fail and return an empty Mat; the original read
	// through warpMat.ptr<double>() unconditionally (undefined behavior).
	if (warpMat.rows != 2 || warpMat.cols != 3) {
		return false;
	}
	_stCalibrateParams.A = warpMat.ptr<double>(0)[0];
	_stCalibrateParams.B = warpMat.ptr<double>(0)[1];
	_stCalibrateParams.C = warpMat.ptr<double>(0)[2];
	_stCalibrateParams.D = warpMat.ptr<double>(1)[0];
	_stCalibrateParams.E = warpMat.ptr<double>(1)[1];
	_stCalibrateParams.F = warpMat.ptr<double>(1)[2];
	return true;
}
// Apply the stored affine calibration to map an image point to robot
// coordinates:
//   [xr]   [A B] [xi]   [C]
//   [yr] = [D E] [yi] + [F]
void ImgPointToRobotPoint(Point2f ptI, Point2f &ptR) {
	const auto &cal = _stCalibrateParams; // shorthand for the global calibration
	ptR.x = cal.A * ptI.x + cal.B * ptI.y + cal.C;
	ptR.y = cal.D * ptI.x + cal.E * ptI.y + cal.F;
}
九点标定 opencv 方式实现 手眼标定
需积分: 5 · 浏览量: 18
2023-10-18 14:26:27 上传
评论 2
收藏 2KB ZIP 举报
孙春泉
- 粉丝: 3
- 资源: 9
最新资源
- kernel-ml-6.8.8-1.el7.elrepo.x86-64.rpm
- Labview基本框架之状态机
- HM2309B-VB一款P-Channel沟道SOT23的MOSFET晶体管参数介绍与应用说明
- Git安全实践:保护你的代码仓库个人学习笔记.md
- 自动驾驶定位系列教程九:后端优化.pdf
- 三国志5威力加强版-windows
- HM2309A-VB一款P-Channel沟道SOT23的MOSFET晶体管参数介绍与应用说明
- HM2306-VB一款N-Channel沟道SOT23的MOSFET晶体管参数介绍与应用说明
- Git进阶技巧:提升团队协作效率个人学习笔记.md
- 自动驾驶定位系列教程八:建图系统结构优化.pdf
资源上传下载、课程学习等过程中有任何疑问或建议,欢迎提出宝贵意见哦~我们会及时处理!
点击此处反馈