63#include <visp3/core/vpConfig.h>
64#include <visp3/core/vpCameraParameters.h>
65#include <visp3/core/vpIoTools.h>
66#include <visp3/core/vpXmlParserCamera.h>
67#include <visp3/detection/vpDetectorAprilTag.h>
68#include <visp3/gui/vpDisplayFactory.h>
69#include <visp3/gui/vpPlot.h>
70#include <visp3/io/vpImageIo.h>
71#include <visp3/robot/vpRobotFranka.h>
72#include <visp3/sensor/vpRealSense2.h>
73#include <visp3/visual_features/vpFeatureBuilder.h>
74#include <visp3/visual_features/vpFeaturePoint.h>
75#include <visp3/vs/vpServo.h>
76#include <visp3/vs/vpServoDisplay.h>
78#if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_FRANKA) && defined(VISP_HAVE_PUGIXML)
80#ifdef ENABLE_VISP_NAMESPACE
85 std::vector<vpImagePoint> *traj_vip)
87 for (
size_t i = 0;
i < vip.size(); ++
i) {
88 if (traj_vip[i].size()) {
91 traj_vip[
i].push_back(vip[i]);
95 traj_vip[
i].push_back(vip[i]);
98 for (
size_t i = 0;
i < vip.size(); ++
i) {
99 for (
size_t j = 1;
j < traj_vip[
i].size();
j++) {
105int main(
int argc,
char **argv)
107 double opt_tag_size = 0.120;
108 bool opt_tag_z_aligned =
false;
109 std::string opt_robot_ip =
"192.168.1.1";
110 std::string opt_eMc_filename =
"";
111 std::string opt_intrinsic_filename =
"";
112 std::string opt_camera_name =
"Camera";
113 bool display_tag =
true;
114 int opt_quad_decimate = 2;
115 bool opt_verbose =
false;
116 bool opt_plot =
false;
117 bool opt_adaptive_gain =
false;
118 bool opt_task_sequencing =
false;
119 double convergence_threshold = 0.00005;
121 for (
int i = 1;
i < argc; ++
i) {
122 if ((std::string(argv[i]) ==
"--tag-size") && (i + 1 < argc)) {
123 opt_tag_size = std::stod(argv[++i]);
125 else if ((std::string(argv[i]) ==
"--tag-quad-decimate") && (i + 1 < argc)) {
126 opt_quad_decimate = std::stoi(argv[++i]);
128 else if (std::string(argv[i]) ==
"--tag-z-aligned") {
129 opt_tag_z_aligned =
true;
131 else if ((std::string(argv[i]) ==
"--ip") && (i + 1 < argc)) {
132 opt_robot_ip = std::string(argv[++i]);
134 else if (std::string(argv[i]) ==
"--intrinsic" && i + 1 < argc) {
135 opt_intrinsic_filename = std::string(argv[++i]);
137 else if (std::string(argv[i]) ==
"--camera-name" && i + 1 < argc) {
138 opt_camera_name = std::string(argv[++i]);
140 else if ((std::string(argv[i]) ==
"--eMc") && (i + 1 < argc)) {
141 opt_eMc_filename = std::string(argv[++i]);
143 else if (std::string(argv[i]) ==
"--verbose") {
146 else if (std::string(argv[i]) ==
"--plot") {
149 else if (std::string(argv[i]) ==
"--adaptive-gain") {
150 opt_adaptive_gain =
true;
152 else if (std::string(argv[i]) ==
"--task-sequencing") {
153 opt_task_sequencing =
true;
155 else if (std::string(argv[i]) ==
"--no-convergence-threshold") {
156 convergence_threshold = 0.;
158 else if ((std::string(argv[i]) ==
"--help") || (std::string(argv[i]) ==
"-h")) {
159 std::cout <<
"SYNOPSYS" << std::endl
161 <<
" [--ip <controller ip>]"
162 <<
" [--intrinsic <xml file>]"
163 <<
" [--camera-name <name>]"
164 <<
" [--tag-size <size>]"
165 <<
" [--tag-quad-decimate <decimation factor>]"
166 <<
" [--tag-z-aligned]"
167 <<
" [--eMc <extrinsic transformation file>]"
168 <<
" [--adaptive-gain]"
170 <<
" [--task-sequencing]"
171 <<
" [--no-convergence-threshold]"
173 <<
" [--help] [-h]\n"
175 std::cout <<
"DESCRIPTION" << std::endl
176 <<
" Use an image-based visual-servoing scheme to position the camera in front of an Apriltag." << std::endl
178 <<
" --ip <controller ip>" << std::endl
179 <<
" Franka controller ip address" << std::endl
180 <<
" Default: " << opt_robot_ip << std::endl
182 <<
" --intrinsic <xml file>" << std::endl
183 <<
" XML file that contains camera intrinsic parameters. " << std::endl
184 <<
" If no file is specified, use Realsense camera factory intrinsic parameters." << std::endl
186 <<
" --camera-name <name>" << std::endl
187 <<
" Camera name in the XML file that contains camera intrinsic parameters." << std::endl
188 <<
" Default: \"Camera\"" << std::endl
190 <<
" --tag-size <size>" << std::endl
191 <<
" Apriltag size in [m]." << std::endl
192 <<
" Default: " << opt_tag_size <<
" [m]" << std::endl
194 <<
" --tag-z-aligned" << std::endl
195 <<
" When enabled, tag z-axis and camera z-axis are aligned." << std::endl
196 <<
" Default: false" << std::endl
198 <<
" --eMc <extrinsic transformation file>" << std::endl
199 <<
" File containing the homogeneous transformation matrix between" << std::endl
200 <<
" robot end-effector and camera frame." << std::endl
202 <<
" --tag-quad-decimate <decimation factor>" << std::endl
203 <<
" Decimation factor used during Apriltag detection." << std::endl
204 <<
" Default: " << opt_quad_decimate << std::endl
206 <<
" --adaptive-gain" << std::endl
207 <<
" Flag to enable adaptive gain to speed up visual servo near convergence." << std::endl
209 <<
" --plot" << std::endl
210 <<
" Flag to enable curve plotter." << std::endl
212 <<
" --task-sequencing" << std::endl
213 <<
" Flag to enable task sequencing scheme." << std::endl
215 <<
" --no-convergence-threshold" << std::endl
216 <<
" Flag to disable convergence threshold used to stop the visual servo." << std::endl
218 <<
" --verbose" << std::endl
219 <<
" Flag to enable extra verbosity." << std::endl
221 <<
" --help, -h" << std::endl
222 <<
" Print this helper message." << std::endl
228 std::cout <<
"\nERROR" << std::endl
229 << std::string(argv[i]) <<
" command line option is not supported." << std::endl
230 <<
"Use " << std::string(argv[0]) <<
" --help" << std::endl
239 config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
240 config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
241 config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
246#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
252 std::cout <<
"Parameters:" << std::endl;
253 std::cout <<
" Apriltag " << std::endl;
254 std::cout <<
" Size [m] : " << opt_tag_size << std::endl;
255 std::cout <<
" Z aligned : " << (opt_tag_z_aligned ?
"true" :
"false") << std::endl;
256 std::cout <<
" Camera intrinsics " << std::endl;
257 std::cout <<
" Factory parameters : " << (opt_intrinsic_filename.empty() ?
"yes" :
"no") << std::endl;
261 if (opt_intrinsic_filename.empty()) {
262 std::cout <<
"Use Realsense camera intrinsic factory parameters: " << std::endl;
264 std::cout <<
"cam:\n" <<
cam << std::endl;
267 std::cout <<
"Camera parameters file " << opt_intrinsic_filename <<
" doesn't exist." << std::endl;
272 if (!opt_camera_name.empty()) {
274 std::cout <<
" Param file name [.xml]: " << opt_intrinsic_filename << std::endl;
275 std::cout <<
" Camera name : " << opt_camera_name << std::endl;
279 std::cout <<
"Unable to parse parameters with distortion for camera \"" << opt_camera_name <<
"\" from "
280 << opt_intrinsic_filename <<
" file" << std::endl;
281 std::cout <<
"Attempt to find parameters without distortion" << std::endl;
283 if (
parser.parse(cam, opt_intrinsic_filename, opt_camera_name,
285 std::cout <<
"Unable to parse parameters without distortion for camera \"" << opt_camera_name <<
"\" from "
286 << opt_intrinsic_filename <<
" file" << std::endl;
293 std::cout <<
"Camera parameters used to compute the pose:\n" <<
cam << std::endl;
300 detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
301 detector.setDisplayTag(display_tag);
302 detector.setAprilTagQuadDecimate(opt_quad_decimate);
303 detector.setZAlignedWithCameraAxis(opt_tag_z_aligned);
308 e_P_c[0] = 0.0337731;
309 e_P_c[1] = -0.00535012;
310 e_P_c[2] = -0.0523339;
311 e_P_c[3] = -0.247294;
312 e_P_c[4] = -0.306729;
316 if (!opt_eMc_filename.empty()) {
317 e_P_c.
loadYAML(opt_eMc_filename, e_P_c);
320 std::cout <<
"Warning, opt_eMc_filename is empty! Use hard coded values." << std::endl;
323 std::cout <<
"e_M_c:\n" << e_M_c << std::endl;
332 robot.connect(opt_robot_ip);
335 std::vector<vpFeaturePoint>
p(4), pd(4);
338 std::vector<vpPoint> point(4);
339 point[0].setWorldCoordinates(-opt_tag_size / 2., -opt_tag_size / 2., 0);
340 point[1].setWorldCoordinates(+opt_tag_size / 2., -opt_tag_size / 2., 0);
341 point[2].setWorldCoordinates(+opt_tag_size / 2., +opt_tag_size / 2., 0);
342 point[3].setWorldCoordinates(-opt_tag_size / 2., +opt_tag_size / 2., 0);
347 for (
size_t i = 0;
i <
p.size(); ++
i) {
348 task.addFeature(p[i], pd[i]);
353 if (opt_adaptive_gain) {
355 task.setLambda(lambda);
365 plotter =
new vpPlot(2,
static_cast<int>(250 * 2), 500,
static_cast<int>(I.getWidth()) + 80, 10,
366 "Real time curves plotter");
367 plotter->setTitle(0,
"Visual features error");
368 plotter->setTitle(1,
"Camera velocities");
371 plotter->setLegend(0, 0,
"error_feat_p1_x");
372 plotter->setLegend(0, 1,
"error_feat_p1_y");
373 plotter->setLegend(0, 2,
"error_feat_p2_x");
374 plotter->setLegend(0, 3,
"error_feat_p2_y");
375 plotter->setLegend(0, 4,
"error_feat_p3_x");
376 plotter->setLegend(0, 5,
"error_feat_p3_y");
377 plotter->setLegend(0, 6,
"error_feat_p4_x");
378 plotter->setLegend(0, 7,
"error_feat_p4_y");
379 plotter->setLegend(1, 0,
"vc_x");
380 plotter->setLegend(1, 1,
"vc_y");
381 plotter->setLegend(1, 2,
"vc_z");
382 plotter->setLegend(1, 3,
"wc_x");
383 plotter->setLegend(1, 4,
"wc_y");
384 plotter->setLegend(1, 5,
"wc_z");
387 bool final_quit =
false;
388 bool has_converged =
false;
389 bool send_velocities =
false;
390 bool servo_started =
false;
391 std::vector<vpImagePoint> *traj_corners =
nullptr;
395 robot.set_eMc(e_M_c);
400 while (!has_converged && !final_quit) {
407 std::vector<vpHomogeneousMatrix> c_M_o_vec;
408 bool ret = detector.detect(I, opt_tag_size, cam, c_M_o_vec);
411 std::stringstream ss;
412 ss <<
"Left click to " << (send_velocities ?
"stop the robot" :
"servo the robot") <<
", right click to quit.";
419 if (ret && (c_M_o_vec.size() == 1)) {
420 c_M_o = c_M_o_vec[0];
422 static bool first_time =
true;
425 std::vector<vpHomogeneousMatrix> secure_o_M_o(2), secure_cd_M_c(2);
426 secure_o_M_o[1].buildFrom(0, 0, 0, 0, 0, M_PI);
427 for (
size_t i = 0;
i < 2; ++
i) {
428 secure_cd_M_c[
i] = cd_M_o * secure_o_M_o[
i] * c_M_o.
inverse();
430 if (std::fabs(secure_cd_M_c[0].getThetaUVector().getTheta()) < std::fabs(secure_cd_M_c[1].getThetaUVector().getTheta())) {
431 o_M_o = secure_o_M_o[0];
434 std::cout <<
"Desired frame modified to avoid PI rotation of the camera" << std::endl;
435 o_M_o = secure_o_M_o[1];
439 for (
size_t i = 0;
i < point.size(); ++
i) {
441 point[
i].changeFrame(cd_M_o * o_M_o, c_P);
442 point[
i].projection(c_P, p);
451 std::vector<vpImagePoint> corners = detector.getPolygon(0);
454 for (
size_t i = 0;
i < corners.size(); ++
i) {
459 point[
i].changeFrame(c_M_o, c_P);
464 if (opt_task_sequencing) {
465 if (!servo_started) {
466 if (send_velocities) {
467 servo_started =
true;
474 v_c =
task.computeControlLaw();
479 for (
size_t i = 0;
i < corners.size(); ++
i) {
480 std::stringstream ss;
490 traj_corners =
new std::vector<vpImagePoint>[corners.size()];
493 display_point_trajectory(I, corners, traj_corners);
497 plotter->plot(1, iter_plot, v_c);
502 std::cout <<
"v_c: " << v_c.t() << std::endl;
505 double error =
task.getError().sumSquare();
506 std::stringstream ss;
507 ss <<
"error: " <<
error;
511 std::cout <<
"error: " <<
error << std::endl;
513 if (error < convergence_threshold) {
514 has_converged =
true;
515 std::cout <<
"Servo task has converged" << std::endl;
526 if (!send_velocities) {
534 std::stringstream ss;
544 send_velocities = !send_velocities;
557 std::cout <<
"Stop the robot " << std::endl;
560 if (opt_plot && plotter !=
nullptr) {
566 while (!final_quit) {
581 delete[] traj_corners;
585 std::cout <<
"ViSP exception: " <<
e.what() << std::endl;
586 std::cout <<
"Stop the robot " << std::endl;
588#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
589 if (display !=
nullptr) {
595 catch (
const franka::NetworkException &e) {
596 std::cout <<
"Franka network exception: " <<
e.what() << std::endl;
597 std::cout <<
"Check if you are connected to the Franka robot"
598 <<
" or if you specified the right IP using --ip command line option set by default to 192.168.1.1. "
600#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
601 if (display !=
nullptr) {
607 catch (
const std::exception &e) {
608 std::cout <<
"Franka exception: " <<
e.what() << std::endl;
609#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
610 if (display !=
nullptr) {
617#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
618 if (display !=
nullptr) {
628#if !defined(VISP_HAVE_REALSENSE2)
629 std::cout <<
"Install librealsense-2.x and rebuild ViSP." << std::endl;
631#if !defined(VISP_HAVE_FRANKA)
632 std::cout <<
"Install libfranka and rebuild ViSP." << std::endl;
634#if !defined(VISP_HAVE_PUGIXML)
635 std::cout <<
"Build ViSP with pugixml support enabled." << std::endl;
Adaptive gain computation.
static bool loadYAML(const std::string &filename, vpArray2D< Type > &A, char *header=nullptr)
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
static const vpColor green
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
Class that defines generic functionalities for display.
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only the 2D coordinates given in sub-pixel format.
static double distance(const vpImagePoint &iP1, const vpImagePoint &iP2)
Definition of the vpImage class member functions.
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
This class enables real time drawing of 2D or 3D graphics. An instance of the class opens a window which contains between 1 and 4 graphics.
Implementation of a pose vector and operations on poses.
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
Implementation of a rotation matrix and operations on such kind of matrices.
static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
Class that consider the case of a translation vector.
XML parser to load and save intrinsic camera parameters.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()