Ensenso Hand-Eye Calibration: A C++ Implementation

The underlying principle is explained in the Ensenso SDK documentation; this post is a simplified code implementation.

You need to change the serial number to match your camera; mine is 194224.

How to use:

Mount the HALCON calibration pattern on the robot arm, run the code, move the robot, and enter the current pose of the pattern-carrying robot arm (repeating this more than 5 times is enough).

If the calibration succeeds, the camera's Link parameter has been updated, and from then on the camera's coordinate system is unified with the robot base frame.
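
If you want to check the result, you can dump the camera's Link node after the calibration finishes. A minimal sketch, assuming NxLib is installed and the same serial number 194224 as above:

#include <stdio.h>
#include "nxLib.h"

int main()
{
    nxLibInitialize(true);

    // Open the camera by its serial number (adjust to your camera).
    NxLibCommand open(cmdOpen);
    open.parameters()[itmCameras] = "194224";
    open.execute();

    // Print the camera's stored Link transformation as JSON.
    NxLibItem camera = NxLibItem()[itmCameras][itmBySerialNo]["194224"];
    printf("%s\n", camera[itmLink].asJson(true).c_str());

    NxLibCommand(cmdClose).execute();
    nxLibFinalize();
    return 0;
}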

At the moment I move the robot manually, but you could also feed in a set of fixed waypoints and run the whole procedure automatically, as sketched below.
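
As a reference for automating this, here is a minimal sketch of how the manual pose entry could be replaced by a TF lookup of the current flange pose from inside a running ROS node. The frame names "base_link" and "tool0" are assumptions; use the frames your robot driver actually publishes:

#include <ros/ros.h>
#include <tf/transform_listener.h>

// Hypothetical helper: look up the current flange pose in the robot base frame via TF.
// Requires ros::init() to have been called and the robot driver to publish this transform.
bool lookupRobotPose(tf::TransformListener& listener, tf::Pose& pose)
{
    tf::StampedTransform transform;
    try {
        listener.waitForTransform("base_link", "tool0", ros::Time(0), ros::Duration(1.0));
        listener.lookupTransform("base_link", "tool0", ros::Time(0), transform);
    } catch (tf::TransformException& e) {
        ROS_WARN("TF lookup failed: %s", e.what());
        return false;
    }
    pose = transform; // tf::StampedTransform is a tf::Transform; tf::Pose is an alias for it
    return true;
}

Inside the capture loop below you would then call this helper instead of reading the pose from cin.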

If you have any questions, feel free to add me on QQ: 1441405602

#include <stdio.h>

#include "nxLib.h"
#include <ros/ros.h>
#include <tf2_ros/transform_broadcaster.h>
#include <tf2_geometry_msgs/tf2_geometry_msgs.h> // Needed for conversion from geometry_msgs to tf2::Transform
#include <geometry_msgs/Pose.h>
#include <geometry_msgs/PoseStamped.h>
#include <geometry_msgs/TransformStamped.h>
#include <string>
#include <tf2_ros/buffer.h>
#include <tf2/LinearMath/Transform.h>

#include <tf/transform_listener.h>
#include <tf/transform_broadcaster.h>

#include <cmath>    // std::isnan
#include <unistd.h> // sleep()
#include <iostream>
using namespace ros;
using namespace std;
bool poseIsValid(const tf::Pose& pose)
{
  auto origin = pose.getOrigin();
  if (std::isnan(origin.x()) || std::isnan(origin.y()) || std::isnan(origin.z()))
  {
    return false;
  }

  auto rotation = pose.getRotation();
  if (std::isnan(rotation.getAngle()))
  {
    return false;
  }

  auto rotationAxis = rotation.getAxis();
  if (std::isnan(rotationAxis.x()) || std::isnan(rotationAxis.y()) || std::isnan(rotationAxis.z()))
  {
    return false;
  }

  return true;
}

void writePoseToNxLib(tf::Pose const& pose, NxLibItem const& node)
{
  // Initialize the node to be empty. This is necessary, because there is a bug in some versions of the NxLib that
  // overwrites the whole transformation node with an identity transformation as soon as a new node in /Links gets
  // created.
  node.setNull();

  if (poseIsValid(pose))
  {
    auto origin = pose.getOrigin();
    node[itmTranslation][0] = origin.x() * 1000;  // ROS transformation is in
                                                  // meters, NxLib expects it to
                                                  // be in millimeters.
    node[itmTranslation][1] = origin.y() * 1000;
    node[itmTranslation][2] = origin.z() * 1000;

    auto rotation = pose.getRotation();
    node[itmRotation][itmAngle] = rotation.getAngle();

    auto rotationAxis = rotation.getAxis();
    node[itmRotation][itmAxis][0] = rotationAxis.x();
    node[itmRotation][itmAxis][1] = rotationAxis.y();
    node[itmRotation][itmAxis][2] = rotationAxis.z();
  }
  else
  {
    // Use an identity transformation as a reasonable default value.
    node[itmTranslation][0] = 0;
    node[itmTranslation][1] = 0;
    node[itmTranslation][2] = 0;

    node[itmRotation][itmAngle] = 0;
    node[itmRotation][itmAxis][0] = 1;
    node[itmRotation][itmAxis][1] = 0;
    node[itmRotation][itmAxis][2] = 0;
  }
}

int main(void)
{
	try {
		// Initialize NxLib and enumerate cameras
		nxLibInitialize(true);

		// Reference to the first camera in the node BySerialNo
		NxLibItem root;
		NxLibItem camera = root[itmCameras][itmBySerialNo][0];

		// Open the Ensenso
		NxLibCommand open(cmdOpen);
		open.parameters()[itmCameras] = camera[itmSerialNumber].asString();
		open.execute();

                // Move your robot into a suitable starting position here. Make sure that the pattern can be seen
                // from the selected position.
                vector<tf::Pose> robotPoses;
                geometry_msgs::Pose robot_pose;

 
               // Set the pattern's grid spacing so that we don't need to decode the data from the pattern later. You
               // will need to adapt this line to the size of the calibration pattern that you are using.
               NxLibItem()[itmParameters][itmPattern][itmGridSpacing] = 20;
 
               // Discard any pattern observations that might already be in the pattern buffer.
               NxLibCommand(cmdDiscardPatterns).execute();
 
               // Turn off the camera's projector and enable the front light so that the calibration pattern can be
               // observed clearly.
               NxLibItem()[itmCameras]["194224"][itmParameters][itmCapture][itmProjector] = false;
               NxLibItem()[itmCameras]["194224"][itmParameters][itmCapture][itmFrontLight] = true;
 
               // We will observe the pattern 10 times here. You can adapt this number depending on how accurate
               // you need the calibration to be.
               for (int i = 0; i < 10; i++) {
                   // Move your robot to a new position from which the pattern can be seen. It might be a good idea to
                   // choose these positions randomly.
                   cout<<"Please enter x:";
                   cin>>robot_pose.position.x;
                   cout<<"Please enter y:";
                   cin>>robot_pose.position.y;
                   cout<<"Please enter z:";
                   cin>>robot_pose.position.z;

                   cout<<"Please enter rw:";
                   cin>>robot_pose.orientation.w;
                   cout<<"Please enter rx:";
                   cin>>robot_pose.orientation.x;
                   cout<<"Please enter ry:";
                   cin>>robot_pose.orientation.y;
                   cout<<"Please enter rz:";
                   cin>>robot_pose.orientation.z;
                   

                   // Convert the entered pose. It is only stored below if the pattern is actually found, so that
                   // the stored robot poses stay in sync with the collected pattern observations.
                   tf::Pose tfPose;
                   tf::poseMsgToTF(robot_pose, tfPose);

  
                  // Make sure that the robot is not moving anymore. You might want to wait for a few seconds to avoid
                  // any oscillations.
                   sleep(2);
 
                   // Observe the calibration pattern and store the observation in the pattern buffer.
                   NxLibCommand capture(cmdCapture);
                   capture.parameters()[itmCameras] = "194224";
                   capture.execute();
                   bool foundPattern = false;
                   try {
                       NxLibCommand collectPattern(cmdCollectPattern);
                       collectPattern.parameters()[itmCameras] = "194224";
                       collectPattern.execute();
                       foundPattern = true;
                   } catch (NxLibException& e) {
                       printf("An NxLib API error with code %d (%s) occurred while accessing item %s.\n",
                              e.getErrorCode(), e.getErrorText().c_str(), e.getItemPath().c_str());
                   }
 
                   if (foundPattern) {
                       // We actually found a pattern. Store the robot pose from which it was observed.
                       robotPoses.push_back(tfPose);
                       cout << "Pattern " << i << " collected successfully." << endl;
                   } else {
                       // The calibration pattern could not be found in the camera image. When your robot poses are
                       // selected randomly, you might want to choose a different one. This pose is discarded so
                       // that the pose count matches the pattern count.
                       cout << "Pattern " << i << " could not be found; the pose is discarded." << endl;
                   }
               }
 
               // You can insert a recalibration here, as you already captured stereo patterns anyway. See here for a
               // code snippet that does a recalibration.
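               // A minimal sketch of such a recalibration, left commented out. Assumption: the pattern
               // observations in the buffer are sufficient for cmdRecalibrate; adapt and uncomment if needed.
               // NxLibCommand recalibrate(cmdRecalibrate);
               // recalibrate.parameters()[itmCameras] = "194224";
               // recalibrate.execute();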
 
               // We collected enough patterns and can start the calibration.
               NxLibCommand calibrateHandEye(cmdCalibrateHandEye);
               calibrateHandEye.parameters()[itmSetup] = valFixed; // Or valMoving when you have a moving (camera-on-robot) setup.
 
               // Write the stored robot poses into the command's Transformations parameter.
               for (size_t i = 0; i < robotPoses.size(); i++)
               {
                   writePoseToNxLib(robotPoses[i], calibrateHandEye.parameters()[itmTransformations][i]);
               }
 
               // Start the calibration. Note that this might take a few minutes if you did a lot of pattern observations.
               calibrateHandEye.execute();
 
               // Store the new calibration to the camera's EEPROM.
               NxLibCommand storeCalibration(cmdStoreCalibration);
               storeCalibration.parameters()[itmCameras] = "194224";
               storeCalibration.parameters()[itmLink] = true;
               storeCalibration.execute();

		// Close & finalize
		NxLibCommand close(cmdClose);
		close.execute();
	} catch (NxLibException& e) { // Display NxLib API exceptions, if any
		printf(
		    "An NxLib API error with code %d (%s) occurred while accessing item %s.\n", e.getErrorCode(),
		    e.getErrorText().c_str(), e.getItemPath().c_str());
		if (e.getErrorCode() == NxLibExecutionFailed)
			printf("/Execute:\n%s\n", NxLibItem(itmExecute).asJson(true).c_str());
	} 
	nxLibFinalize();

	return 0;
}