Visual Servoing Platform version 3.6.0
servoFrankaPBVS.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Pose-based visual servoing (PBVS) with a Franka robot equipped with a RealSense camera.
 *
 *****************************************************************************/

#include <iostream>

#include <visp3/core/vpCameraParameters.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/robot/vpRobotFranka.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/visual_features/vpFeatureThetaU.h>
#include <visp3/visual_features/vpFeatureTranslation.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

#if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11) && \
    (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_FRANKA)

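// Helper that draws, as green segments, the trajectory followed in the image by
// each tracked point (here the four tag corners and the tag cog). A new sample is
// appended to a trajectory only when the point moved by more than 1 pixel since
// the previous one.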
void display_point_trajectory(const vpImage<unsigned char> &I, const std::vector<vpImagePoint> &vip,
                              std::vector<vpImagePoint> *traj_vip)
{
  for (size_t i = 0; i < vip.size(); i++) {
    if (traj_vip[i].size()) {
      // Add the point only if distance with the previous > 1 pixel
      if (vpImagePoint::distance(vip[i], traj_vip[i].back()) > 1.) {
        traj_vip[i].push_back(vip[i]);
      }
    } else {
      traj_vip[i].push_back(vip[i]);
    }
  }
  for (size_t i = 0; i < vip.size(); i++) {
    for (size_t j = 1; j < traj_vip[i].size(); j++) {
      vpDisplay::displayLine(I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2);
    }
  }
}

int main(int argc, char **argv)
{
  double opt_tagSize = 0.120;
  std::string opt_robot_ip = "192.168.1.1";
  std::string opt_eMc_filename = "";
  bool display_tag = true;
  int opt_quad_decimate = 2;
  bool opt_verbose = false;
  bool opt_plot = false;
  bool opt_adaptive_gain = false;
  bool opt_task_sequencing = false;
  double convergence_threshold_t = 0.0005; // Value in [m]
  double convergence_threshold_tu = 0.5;   // Value in [deg]

  for (int i = 1; i < argc; i++) {
    if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
      opt_tagSize = std::stod(argv[i + 1]);
    } else if (std::string(argv[i]) == "--ip" && i + 1 < argc) {
      opt_robot_ip = std::string(argv[i + 1]);
    } else if (std::string(argv[i]) == "--eMc" && i + 1 < argc) {
      opt_eMc_filename = std::string(argv[i + 1]);
    } else if (std::string(argv[i]) == "--verbose") {
      opt_verbose = true;
    } else if (std::string(argv[i]) == "--plot") {
      opt_plot = true;
    } else if (std::string(argv[i]) == "--adaptive_gain") {
      opt_adaptive_gain = true;
    } else if (std::string(argv[i]) == "--task_sequencing") {
      opt_task_sequencing = true;
    } else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
      opt_quad_decimate = std::stoi(argv[i + 1]);
    } else if (std::string(argv[i]) == "--no-convergence-threshold") {
      convergence_threshold_t = 0.;
      convergence_threshold_tu = 0.;
    } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
      std::cout
        << argv[0] << " [--ip <default " << opt_robot_ip << ">] [--tag_size <marker size in meters; default "
        << opt_tagSize << ">] [--eMc <eMc extrinsic file>] "
        << "[--quad_decimate <decimation; default " << opt_quad_decimate
        << ">] [--adaptive_gain] [--plot] [--task_sequencing] [--no-convergence-threshold] [--verbose] [--help] [-h]"
        << "\n";
      return EXIT_SUCCESS;
    }
  }

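  // Example invocation (robot IP, tag size and options are illustrative and must
  // be adapted to the actual setup):
  //   ./servoFrankaPBVS --ip 192.168.1.1 --tag_size 0.120 --adaptive_gain --plot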
  vpRobotFranka robot;

  try {
    robot.connect(opt_robot_ip);

    vpRealSense2 rs;
    rs2::config config;
    unsigned int width = 640, height = 480;
    config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
    config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
    config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
    rs.open(config);

    // Get camera extrinsics
    vpPoseVector ePc;
    // Set camera extrinsics default values
    ePc[0] = 0.0337731;
    ePc[1] = -0.00535012;
    ePc[2] = -0.0523339;
    ePc[3] = -0.247294;
    ePc[4] = -0.306729;
    ePc[5] = 1.53055;

    // If provided, read camera extrinsics from --eMc <file>
    if (!opt_eMc_filename.empty()) {
      ePc.loadYAML(opt_eMc_filename, ePc);
    } else {
      std::cout << "Warning, opt_eMc_filename is empty! Use hard coded values."
                << "\n";
    }
    vpHomogeneousMatrix eMc(ePc);
    std::cout << "eMc:\n" << eMc << "\n";

    // Get camera intrinsics
    vpCameraParameters cam =
        rs.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithDistortion);
    std::cout << "cam:\n" << cam << "\n";

    vpImage<unsigned char> I(height, width);

#if defined(VISP_HAVE_X11)
    vpDisplayX dc(I, 10, 10, "Color image");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI dc(I, 10, 10, "Color image");
#endif

    vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
    vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
    // vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::BEST_RESIDUAL_VIRTUAL_VS;
    vpDetectorAprilTag detector(tagFamily);
    detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
    detector.setDisplayTag(display_tag);
    detector.setAprilTagQuadDecimate(opt_quad_decimate);

    // Servo
    vpHomogeneousMatrix cdMc, cMo, oMo;

    // Desired pose to reach
    vpHomogeneousMatrix cdMo(vpTranslationVector(0, 0, opt_tagSize * 3), // 3 times tag width along camera z axis
                             vpRotationMatrix({1, 0, 0, 0, -1, 0, 0, 0, -1}));

    cdMc = cdMo * cMo.inverse();
    // Current visual features: translation and theta-u rotation between desired and current camera frames
    vpFeatureTranslation t(vpFeatureTranslation::cdMc);
    vpFeatureThetaU tu(vpFeatureThetaU::cdRc);
    t.buildFrom(cdMc);
    tu.buildFrom(cdMc);

    // Desired visual features (zero error when cdMc is the identity)
    vpFeatureTranslation td(vpFeatureTranslation::cdMc);
    vpFeatureThetaU tud(vpFeatureThetaU::cdRc);

    vpServo task;
    task.addFeature(t, td);
    task.addFeature(tu, tud);
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);

    if (opt_adaptive_gain) {
      vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=1.5, lambda(oo)=0.4 and lambda'(0)=30
      task.setLambda(lambda);
    } else {
      task.setLambda(0.5);
    }

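    // The task configured above implements the classical PBVS control law
    //   v_c = -lambda * L^+ * e,  with  e = (c*_t_c, theta_u(c*_R_c))
    // where cdMc, the pose of the current camera frame wrt the desired one, is
    // re-estimated at every iteration from the tag pose cMo detected in the image.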
    vpPlot *plotter = nullptr;
    int iter_plot = 0;

    if (opt_plot) {
      plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
                           "Real time curves plotter");
      plotter->setTitle(0, "Visual features error");
      plotter->setTitle(1, "Camera velocities");
      plotter->initGraph(0, 6);
      plotter->initGraph(1, 6);
      plotter->setLegend(0, 0, "error_feat_tx");
      plotter->setLegend(0, 1, "error_feat_ty");
      plotter->setLegend(0, 2, "error_feat_tz");
      plotter->setLegend(0, 3, "error_feat_theta_ux");
      plotter->setLegend(0, 4, "error_feat_theta_uy");
      plotter->setLegend(0, 5, "error_feat_theta_uz");
      plotter->setLegend(1, 0, "vc_x");
      plotter->setLegend(1, 1, "vc_y");
      plotter->setLegend(1, 2, "vc_z");
      plotter->setLegend(1, 3, "wc_x");
      plotter->setLegend(1, 4, "wc_y");
      plotter->setLegend(1, 5, "wc_z");
    }

    bool final_quit = false;
    bool has_converged = false;
    bool send_velocities = false;
    bool servo_started = false;
    std::vector<vpImagePoint> *traj_vip = nullptr; // To memorize point trajectory

    static double t_init_servo = vpTime::measureTimeMs();

    robot.set_eMc(eMc); // Set location of the camera wrt end-effector frame
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

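    // Servo loop: acquire an image, detect the AprilTag, update the visual
    // features from the estimated tag pose, compute the camera velocity v_c and
    // send it to the robot, until the translation and rotation errors fall below
    // the convergence thresholds (or the user right clicks to quit).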
    while (!has_converged && !final_quit) {
      double t_start = vpTime::measureTimeMs();

      rs.acquire(I);

      vpDisplay::display(I);

      std::vector<vpHomogeneousMatrix> cMo_vec;
      detector.detect(I, opt_tagSize, cam, cMo_vec);

      std::stringstream ss;
      ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
      vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);

      vpColVector v_c(6);

      // Only one tag is detected
      if (cMo_vec.size() == 1) {
        cMo = cMo_vec[0];

        static bool first_time = true;
        if (first_time) {
          // Introduce security wrt tag positioning in order to avoid PI rotation
          std::vector<vpHomogeneousMatrix> v_oMo(2), v_cdMc(2);
          v_oMo[1].buildFrom(0, 0, 0, 0, 0, M_PI);
          for (size_t i = 0; i < 2; i++) {
            v_cdMc[i] = cdMo * v_oMo[i] * cMo.inverse();
          }
          if (std::fabs(v_cdMc[0].getThetaUVector().getTheta()) < std::fabs(v_cdMc[1].getThetaUVector().getTheta())) {
            oMo = v_oMo[0];
          } else {
            std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
            oMo = v_oMo[1]; // Introduce PI rotation
          }
        }

        // Update visual features
        cdMc = cdMo * oMo * cMo.inverse();
        t.buildFrom(cdMc);
        tu.buildFrom(cdMc);

        if (opt_task_sequencing) {
          if (!servo_started) {
            if (send_velocities) {
              servo_started = true;
            }
            t_init_servo = vpTime::measureTimeMs();
          }
          v_c = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
        } else {
          v_c = task.computeControlLaw();
        }

        // Display desired and current pose features
        vpDisplay::displayFrame(I, cdMo * oMo, cam, opt_tagSize / 1.5, vpColor::yellow, 2);
        vpDisplay::displayFrame(I, cMo, cam, opt_tagSize / 2, vpColor::none, 3);
        // Get tag corners
        std::vector<vpImagePoint> vip = detector.getPolygon(0);
        // Get the tag cog corresponding to the projection of the tag frame in the image
        vip.push_back(detector.getCog(0));
        // Display the trajectory of the points
        if (first_time) {
          traj_vip = new std::vector<vpImagePoint>[vip.size()];
        }
        display_point_trajectory(I, vip, traj_vip);

        if (opt_plot) {
          plotter->plot(0, iter_plot, task.getError());
          plotter->plot(1, iter_plot, v_c);
          iter_plot++;
        }

        if (opt_verbose) {
          std::cout << "v_c: " << v_c.t() << std::endl;
        }

        vpTranslationVector cd_t_c = cdMc.getTranslationVector();
        vpThetaUVector cd_tu_c = cdMc.getThetaUVector();
        double error_tr = sqrt(cd_t_c.sumSquare());
        double error_tu = vpMath::deg(sqrt(cd_tu_c.sumSquare()));

        ss.str("");
        ss << "error_t: " << error_tr;
        vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
        ss.str("");
        ss << "error_tu: " << error_tu;
        vpDisplay::displayText(I, 40, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);

        if (opt_verbose)
          std::cout << "error translation: " << error_tr << " ; error rotation: " << error_tu << std::endl;

        if (error_tr < convergence_threshold_t && error_tu < convergence_threshold_tu) {
          has_converged = true;
          std::cout << "Servo task has converged" << std::endl;
          vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
        }

        if (first_time) {
          first_time = false;
        }
      } // end if (cMo_vec.size() == 1)
      else {
        v_c = 0;
      }

      if (!send_velocities) {
        v_c = 0;
      }

      // Send the camera velocity to the robot
      robot.setVelocity(vpRobot::CAMERA_FRAME, v_c);

      ss.str("");
      ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
      vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
      vpDisplay::flush(I);

      vpMouseButton::vpMouseButtonType button;
      if (vpDisplay::getClick(I, button, false)) {
        switch (button) {
        case vpMouseButton::button1:
          send_velocities = !send_velocities;
          break;

        case vpMouseButton::button3:
          final_quit = true;
          v_c = 0;
          break;

        default:
          break;
        }
      }
    }
    std::cout << "Stop the robot " << std::endl;
    robot.setRobotState(vpRobot::STATE_STOP);

    if (opt_plot && plotter != nullptr) {
      delete plotter;
      plotter = nullptr;
    }

    if (!final_quit) {
      while (!final_quit) {
        rs.acquire(I);
        vpDisplay::display(I);

        vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
        vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);

        if (vpDisplay::getClick(I, false)) {
          final_quit = true;
        }

        vpDisplay::flush(I);
      }
    }

    if (traj_vip) {
      delete[] traj_vip;
    }
  } catch (const vpException &e) {
    std::cout << "ViSP exception: " << e.what() << std::endl;
    std::cout << "Stop the robot " << std::endl;
    robot.setRobotState(vpRobot::STATE_STOP);
    return EXIT_FAILURE;
  } catch (const franka::NetworkException &e) {
    std::cout << "Franka network exception: " << e.what() << std::endl;
    std::cout << "Check if you are connected to the Franka robot"
              << " or if you specified the right IP using the --ip command line option (default: 192.168.1.1)."
              << std::endl;
    return EXIT_FAILURE;
  } catch (const std::exception &e) {
    std::cout << "Franka exception: " << e.what() << std::endl;
    return EXIT_FAILURE;
  }

  return EXIT_SUCCESS;
}
#else
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "Install librealsense-2.x" << std::endl;
#endif
#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
  std::cout << "Build ViSP with c++11 or higher compiler flag (cmake -DUSE_CXX_STANDARD=11)." << std::endl;
#endif
#if !defined(VISP_HAVE_FRANKA)
  std::cout << "Install libfranka." << std::endl;
#endif
  return EXIT_SUCCESS;
}
#endif
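
For reference, when --adaptive_gain is passed the example builds vpAdaptiveGain(1.5, 0.4, 30). The short standalone sketch below reproduces the corresponding gain schedule, assuming the exponential model documented for vpAdaptiveGain (gain at zero error, gain at infinity, slope at zero); adaptive_lambda() and the sample error norms are illustrative only, not part of ViSP.

#include <cmath>
#include <iostream>

// Gain schedule lambda(x) = (l0 - linf) * exp(-m0 * x / (l0 - linf)) + linf,
// with l0 the gain at zero error, linf the gain at infinity and m0 the slope
// magnitude at zero. adaptive_lambda() is a hypothetical helper for illustration.
double adaptive_lambda(double error_norm, double l0 = 1.5, double linf = 0.4, double m0 = 30.)
{
  double a = l0 - linf; // amplitude of the exponential term
  double b = m0 / a;    // decay rate giving a slope magnitude of m0 at zero error
  return a * std::exp(-b * error_norm) + linf;
}

int main()
{
  // Small errors get the large gain (1.5) to speed up the final convergence,
  // large errors get the small gain (~0.4) to keep camera velocities moderate.
  const double samples[] = { 0., 0.01, 0.05, 0.1, 0.5 };
  for (double x : samples) {
    std::cout << "||e|| = " << x << " -> lambda = " << adaptive_lambda(x) << std::endl;
  }
  return 0;
}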