Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
servoBiclopsPoint2DArtVelocity.cpp
/*
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * tests the control law
 * eye-in-hand control
 * velocity computed in articular
 */
35
44
45#include <iostream>
46
47#include <visp3/core/vpConfig.h>
48
49#if defined(VISP_HAVE_BICLOPS) && defined(VISP_HAVE_APRILTAG) && defined(VISP_HAVE_REALSENSE2)
50
51#include <visp3/core/vpDisplay.h>
52#include <visp3/core/vpException.h>
53#include <visp3/core/vpHomogeneousMatrix.h>
54#include <visp3/core/vpImage.h>
55#include <visp3/detection/vpDetectorAprilTag.h>
56#include <visp3/gui/vpDisplayFactory.h>
57#include <visp3/io/vpParseArgv.h>
58#include <visp3/robot/vpRobotBiclops.h>
59#include <visp3/sensor/vpRealSense2.h>
60#include <visp3/visual_features/vpFeatureBuilder.h>
61#include <visp3/visual_features/vpFeaturePoint.h>
62#include <visp3/vs/vpServo.h>
63#include <visp3/vs/vpServoDisplay.h>
64
65// List of allowed command line options
66#define GETOPTARGS "c:d:h"
67
68#ifdef ENABLE_VISP_NAMESPACE
69using namespace VISP_NAMESPACE_NAME;
70#endif
71
/*!
 * Print command line usage on stdout and, when \e badparam is not null,
 * report the offending parameter on stderr.
 *
 * \param name : Program name (argv[0]).
 * \param badparam : Bad parameter name to report, or nullptr when the help
 * is simply requested.
 * \param conf : Default Biclops configuration file path shown in the help.
 */
void usage(const char *name, const char *badparam, std::string &conf)
{
  fprintf(stdout, "\n\
Example of eye-in-hand control law. We control here a real robot, the biclops\n\
robot (pan-tilt head provided by Traclabs) equipped with a Realsense camera\n\
mounted on its end-effector. The velocity to apply to the PT head is joint\n\
velocity. The visual feature is a point corresponding to the center of\n\
gravity of an AprilTag. \n\
\n\
SYNOPSIS\n\
  %s [-c <Biclops configuration file>] [-h]\n",
          name);

  fprintf(stdout, "\n\
OPTIONS:                                          Default\n\
  -c <Biclops configuration file>                 %s\n\
     Sets the Biclops robot configuration file.\n",
          conf.c_str());

  if (badparam) {
    fprintf(stderr, "ERROR: \n");
    fprintf(stderr, "\nBad parameter [%s]\n", badparam);
  }
}
103
114bool getOptions(int argc, const char **argv, std::string &conf)
115{
116 const char *optarg_;
117 int c;
118 while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg_)) > 1) {
119
120 switch (c) {
121 case 'c':
122 conf = optarg_;
123 break;
124 case 'h':
125 usage(argv[0], nullptr, conf);
126 return false;
127
128 default:
129 usage(argv[0], optarg_, conf);
130 return false;
131 }
132 }
133
134 if ((c == 1) || (c == -1)) {
135 // standalone param or error
136 usage(argv[0], nullptr, conf);
137 std::cerr << "ERROR: " << std::endl;
138 std::cerr << " Bad argument " << optarg_ << std::endl << std::endl;
139 return false;
140 }
141
142 return true;
143}
144
145int main(int argc, const char **argv)
146{
147#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
148 std::shared_ptr<vpDisplay> display;
149#else
150 vpDisplay *display = nullptr;
151#endif
152 try {
153 // Default unix configuration file path
154 std::string opt_conf = "/usr/share/BiclopsDefault.cfg";
155
156 // Read the command line options
157 if (getOptions(argc, argv, opt_conf) == false) {
158 return EXIT_FAILURE;
159 }
160
161 // Initialize PTU
162 vpRobotBiclops robot(opt_conf);
163
164 /*
165 * Biclops DH2 has the following axis orientation
166 *
167 * tilt + <---- (end-effector-frame)
168 * |
169 * \/ pan +
170 *
171 * The end-effector-frame from PT unit rear view is the following
172 *
173 * /\ x
174 * |
175 * (e) ----> y
176 *
177 *
178 *
179 * The camera frame attached to the PT unit is the following (rear view)
180 *
181 * (c) ----> x
182 * |
183 * \/ y
184 *
185 * The corresponding cRe (camera to end-effector rotation matrix) is then the following
186 *
187 * ( 0 1 0)
188 * cRe = (-1 0 0)
189 * ( 0 0 1)
190 *
191 * Translation cte (camera to end-effector) can be neglected
192 *
193 * (0)
194 * cte = (0)
195 * (0)
196 */
197
198 robot.setDenavitHartenbergModel(vpBiclops::DH2);
200 cRe[0][0] = 0; cRe[0][1] = 1; cRe[0][2] = 0;
201 cRe[1][0] = -1; cRe[1][1] = 0; cRe[1][2] = 0;
202 cRe[2][0] = 0; cRe[2][1] = 0; cRe[2][2] = 1;
203 vpTranslationVector cte; // By default set to 0
204
205 // Robot Jacobian (expressed in the end-effector frame)
206 vpMatrix eJe;
207 // Camera to end-effector frame transformation
208 vpHomogeneousMatrix cMe(cte, cRe);
209 // Velocity twist transformation to express a velocity from end-effector to camera frame
210 vpVelocityTwistMatrix cVe(cMe);
211
212 // Initialize grabber
213 vpRealSense2 g;
214 rs2::config config;
215 config.disable_stream(RS2_STREAM_DEPTH);
216 config.disable_stream(RS2_STREAM_INFRARED);
217 config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
218 g.open(config);
219
220 std::cout << "Read camera parameters from Realsense device" << std::endl;
223
225 q = 0;
226 std::cout << "Move PT to initial position: " << q.t() << std::endl;
227 robot.setRobotState(vpRobot::STATE_POSITION_CONTROL);
228 robot.setPosition(vpRobot::JOINT_STATE, q);
229
231 g.acquire(I);
232
233 // We open a window using either X11 or GTK or GDI.
234 // Its size is automatically defined by the image (I) size
235#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
236 display = vpDisplayFactory::createDisplay(I, 100, 100, "Display GDI...");
237#else
238 display = vpDisplayFactory::allocateDisplay(I, 100, 100, "Display GDI...");
239#endif
240
243
244 vpDetectorAprilTag detector;
245
247
248 // Create current and desired point visual feature
249 vpFeaturePoint p, pd;
250 // Sets the desired position of the visual feature
251 // Here we set Z desired to 1 meter, and (x,y)=(0,0) to center the tag in the image
252 pd.buildFrom(0, 0, 1);
253
254 // Define the task
255 // - we want an eye-in-hand control law
256 // - joint velocities are computed
257 // - interaction matrix is the one at desired position
259 task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);
260 task.set_cVe(cVe);
261 // We want to see a point on a point
262 task.addFeature(p, pd);
263 // Set the gain
264 task.setLambda(0.2);
265
266 robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
267
268 bool quit = false;
269 bool send_velocities = false;
270 vpColVector q_dot;
271
272 while (!quit) {
273 g.acquire(I);
275
276 {
277 std::stringstream ss;
278 ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
279 vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
280 }
281
282 if (detector.detect(I)) {
283 // We consider the first tag only
284 vpImagePoint cog = detector.getCog(0); // 0 is the id of the first tag
285
286 vpFeatureBuilder::create(p, cam, cog);
287
288 // Get the jacobian
289 robot.get_eJe(eJe);
290 task.set_eJe(eJe);
291
292 q_dot = task.computeControlLaw();
293
294 vpServoDisplay::display(task, cam, I);
296
297 std::cout << "q_dot: " << q_dot.t() << std::endl;
298
299 std::cout << "|| s - s* || = " << (task.getError()).sumSquare() << std::endl;
300 }
301 else {
302 q_dot = 0;
303 }
304 if (!send_velocities) {
305 q_dot = 0;
306 }
307
308 robot.setVelocity(vpRobot::JOINT_STATE, q_dot);
309
311
313 if (vpDisplay::getClick(I, button, false)) {
314 switch (button) {
316 send_velocities = !send_velocities;
317 break;
318
320 quit = true;
321 q_dot = 0;
322 break;
323
324 default:
325 break;
326 }
327 }
328 }
329
330 std::cout << "Stop the robot " << std::endl;
331 robot.setRobotState(vpRobot::STATE_STOP);
332
333#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
334 if (display != nullptr) {
335 delete display;
336 }
337#endif
338 return EXIT_SUCCESS;
339 }
340 catch (const vpException &e) {
341 std::cout << "Catch an exception: " << e.getMessage() << std::endl;
342#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
343 if (display != nullptr) {
344 delete display;
345 }
346#endif
347 return EXIT_FAILURE;
348 }
349}
350
351#else
// Fallback entry point when ViSP is built without Biclops, AprilTag or
// Realsense support: explain why the example cannot run and exit cleanly.
int main()
{
  // Fix grammar of the user-facing message ("an Biclops" -> "a Biclops").
  std::cout << "You do not have a Biclops PT robot connected to your computer..." << std::endl;
  return EXIT_SUCCESS;
}
357#endif
@ DH2
Second Denavit-Hartenberg representation.
Definition vpBiclops.h:97
static const unsigned int ndof
Number of dof.
Definition vpBiclops.h:101
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
vpRowVector t() const
static const vpColor red
Definition vpColor.h:198
bool detect(const vpImage< unsigned char > &I) VP_OVERRIDE
vpImagePoint getCog(size_t i) const
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Class that defines a 2D point visual feature which is composed by two parameters that are the cartes...
vpFeaturePoint & buildFrom(const double &x, const double &y, const double &Z)
Implementation of an homogeneous matrix and operations on such kind of matrices.
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
Definition vpImage.h:131
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:175
static bool parse(int *argcPtr, const char **argv, vpArgvInfo *argTable, int flags)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
Interface for the Biclops, pan, tilt head control.
@ JOINT_STATE
Definition vpRobot.h:79
@ STATE_POSITION_CONTROL
Initialize the position controller.
Definition vpRobot.h:65
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
Definition vpRobot.h:64
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
Definition vpRobot.h:63
Implementation of a rotation matrix and operations on such kind of matrices.
double sumSquare() const
static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
@ EYEINHAND_L_cVe_eJe
Definition vpServo.h:183
@ PSEUDO_INVERSE
Definition vpServo.h:250
@ DESIRED
Definition vpServo.h:223
Class that consider the case of a translation vector.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.