Visual Servoing Platform version 3.6.0
servoViper850FourPoints2DArtVelocityLs_cur.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * tests the control law
 * eye-in-hand control
 * velocity computed in the articular frame
 *
 *****************************************************************************/
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h> // Debug trace

#include <fstream>
#include <iostream>
#include <sstream>
#include <stdio.h>
#include <stdlib.h>
#if (defined(VISP_HAVE_VIPER850) && defined(VISP_HAVE_DC1394))

#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpPixelMeterConversion.h> // for the explicit pixel to meter conversion below
#include <visp3/core/vpPoint.h>
#include <visp3/core/vpRxyzVector.h> // for the Rxyz rotation vector used to build the desired pose
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/robot/vpRobotViper850.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

#define L 0.05 // to deal with a 10cm by 10cm square
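// The four target points are set at (±L, ±L, 0) in the object frame (see the
// setWorldCoordinates() calls below), so the tracked square is 2*L = 0.1 m wide.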

void compute_pose(vpPoint point[], vpDot2 dot[], int ndot, vpCameraParameters cam, vpHomogeneousMatrix &cMo, bool init)
{
  vpPose pose;
  vpImagePoint cog;
  for (int i = 0; i < ndot; i++) {
    double x = 0, y = 0;
    cog = dot[i].getCog();
    vpPixelMeterConversion::convertPoint(cam, cog, x, y); // pixel to meter conversion
    point[i].set_x(x);                                    // perspective projection p
    point[i].set_y(y);
    pose.addPoint(point[i]);
  }

  if (init == true) {
    // First call: initialise the non-linear pose estimation with a linear method
    pose.computePose(vpPose::DEMENTHON_LAGRANGE_VIRTUAL_VS, cMo);
  } else {
    // init = false; use the previous pose to initialise the virtual visual servoing minimisation
    pose.computePose(vpPose::VIRTUAL_VS, cMo);
  }
}

int main()
{
  // Log file creation in /tmp/$USERNAME/log.dat
  // Each line of this file contains:
  // - the 6 computed joint velocities (m/s, rad/s) to achieve the task
  // - the 6 measured joint velocities (m/s, rad/s)
  // - the 6 measured joint positions (m, rad)
  // - the 8 values of s - s*
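  // Each log line therefore holds 26 space-separated values:
  //   qdot_1 ... qdot_6   qvel_1 ... qvel_6   q_1 ... q_6   e_1 ... e_8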
  std::string username;
  // Get the user login name
  vpIoTools::getUserName(username);

  // Create a log filename to save velocities...
  std::string logdirname;
  logdirname = "/tmp/" + username;

  // Test if the output path exists. If not, try to create it
  if (vpIoTools::checkDirectory(logdirname) == false) {
    try {
      // Create the dirname
      vpIoTools::makeDirectory(logdirname);
    } catch (...) {
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << "  Cannot create " << logdirname << std::endl;
      return EXIT_FAILURE;
    }
  }
  std::string logfilename;
  logfilename = logdirname + "/log.dat";

  // Open the log file
  std::ofstream flog(logfilename.c_str());

  try {
    vpRobotViper850 robot;
    // Load the end-effector to camera frame transformation obtained
    // using a camera intrinsic model with distortion
    vpCameraParameters::vpCameraParametersProjType projModel = vpCameraParameters::perspectiveProjWithDistortion;
    robot.init(vpRobotViper850::TOOL_PTGREY_FLEA2_CAMERA, projModel);

    vpServo task;

    vpImage<unsigned char> I;
    int i;

    bool reset = false;
    vp1394TwoGrabber g(reset);
    g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
    g.open(I);

    g.acquire(I);

#ifdef VISP_HAVE_X11
    vpDisplayX display(I, 100, 100, "Current image");
#elif defined(HAVE_OPENCV_HIGHGUI)
    vpDisplayOpenCV display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_GTK)
    vpDisplayGTK display(I, 100, 100, "Current image");
#endif

    vpDisplay::display(I);
    vpDisplay::flush(I);

    std::cout << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << " Test program for vpServo " << std::endl;
    std::cout << " Eye-in-hand task control, velocity computed in the joint space" << std::endl;
    std::cout << " Use of the Viper 850 robot " << std::endl;
    std::cout << " task : servo 4 points on a square with dimension " << L << " meters" << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << std::endl;

    vpDot2 dot[4];
    vpImagePoint cog;

    std::cout << "Click on the 4 dots clockwise starting from upper/left dot..." << std::endl;

    for (i = 0; i < 4; i++) {
      dot[i].setGraphics(true);
      dot[i].initTracking(I);
      cog = dot[i].getCog();
      vpDisplay::displayCross(I, cog, 10, vpColor::blue);
      vpDisplay::flush(I);
    }

    vpCameraParameters cam;

    // Update camera parameters
    robot.getCameraParameters(cam, I);

    cam.printParameters();

    // Sets the current position of the visual feature
    vpFeaturePoint p[4];
    for (i = 0; i < 4; i++)
      vpFeatureBuilder::create(p[i], cam, dot[i]); // retrieve x,y of the vpFeaturePoint structure

    // Set the position of the square target in a frame whose origin is
    // centered in the middle of the square
    vpPoint point[4];
    point[0].setWorldCoordinates(-L, -L, 0);
    point[1].setWorldCoordinates(L, -L, 0);
    point[2].setWorldCoordinates(L, L, 0);
    point[3].setWorldCoordinates(-L, L, 0);

    // Initialise a desired pose to compute s*, the desired 2D point features
    vpHomogeneousMatrix cMo;
    vpTranslationVector cto(0, 0, 0.5);                                  // tz = 0.5 meter
    vpRxyzVector cro(vpMath::rad(0), vpMath::rad(10), vpMath::rad(20));  // illustrative desired orientation (Rxyz angles)
    vpRotationMatrix cRo(cro); // Build the rotation matrix
    cMo.buildFrom(cto, cRo);   // Build the homogeneous matrix
    // Sets the desired position of the 2D visual feature
    vpFeaturePoint pd[4];
    // Compute the desired position of the features from the desired pose
    for (int i = 0; i < 4; i++) {
      vpColVector cP, p;
      point[i].changeFrame(cMo, cP);
      point[i].projection(cP, p);

      pd[i].set_x(p[0]);
      pd[i].set_y(p[1]);
      pd[i].set_Z(cP[2]);
    }

    // We want to see a point on a point
    for (i = 0; i < 4; i++)
      task.addFeature(p[i], pd[i]);

    // Set the proportional gain
    task.setLambda(0.3);

    // Display task information
    task.print();

    // Define the task
    // - we want an eye-in-hand control law
    // - articular velocities are computed
    task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
    task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE);
    task.print();

    // Set the twist transformation between the camera and end-effector frames
    vpVelocityTwistMatrix cVe;
    robot.get_cVe(cVe);
    task.set_cVe(cVe);
    task.print();

    // Set the Jacobian (expressed in the end-effector frame)
    vpMatrix eJe;
    robot.get_eJe(eJe);
    task.set_eJe(eJe);
    task.print();

    // Initialise the velocity control of the robot
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

    std::cout << "\nHit CTRL-C to stop the loop...\n" << std::flush;
    bool init_pose_from_linear_method = true;
    for (;;) {
      // Acquire a new image from the camera
      g.acquire(I);

      // Display this image
      vpDisplay::display(I);

      try {
        // For each point...
        for (i = 0; i < 4; i++) {
          // Achieve the tracking of the dot in the image
          dot[i].track(I);
          // Display a green cross at the center of gravity position in the image
          cog = dot[i].getCog();
          vpDisplay::displayCross(I, cog, 10, vpColor::green);
        }
      } catch (...) {
        flog.close(); // Close the log file
        vpTRACE("Error detected while tracking visual features");
        robot.stopMotion();
        return EXIT_FAILURE;
      }

      // At the first iteration, we initialise the non-linear pose estimation with a linear approach.
      // For the other iterations, the non-linear pose estimation is initialised with the pose
      // estimated at the previous iteration of the loop
      compute_pose(point, dot, 4, cam, cMo, init_pose_from_linear_method);
      if (init_pose_from_linear_method) {
        init_pose_from_linear_method = false;
      }

      for (i = 0; i < 4; i++) {
        // Update the point feature from the dot location
        vpFeatureBuilder::create(p[i], cam, dot[i]);
        // Set the feature Z coordinate from the pose
        vpColVector cP;
        point[i].changeFrame(cMo, cP);

        p[i].set_Z(cP[2]);
      }

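      // Updating Z from the estimated pose matters here: with the vpServo::CURRENT option,
      // the interaction matrix of each 2D point feature depends on the current depth Z.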
      // Get the Jacobian of the robot
      robot.get_eJe(eJe);
      // Update this Jacobian in the task structure. It will be used to
      // compute the joint velocities qdot = -lambda * (L cVe eJe)^+ * (s-s*)
      task.set_eJe(eJe);

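      // In this EYEINHAND_L_cVe_eJe configuration the servo stacks
      //   L   (8x6)  interaction matrix of the four 2D point features (2 rows per point),
      //   cVe (6x6)  camera to end-effector velocity twist transformation,
      //   eJe (6x6)  robot Jacobian expressed in the end-effector frame,
      // so computeControlLaw() directly returns the 6 joint velocities.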
      vpColVector v;
      // Compute the visual servoing skew vector
      v = task.computeControlLaw();

      // Display the current and desired feature points in the image display
      vpServoDisplay::display(task, cam, I);

      // Apply the computed joint velocities to the robot
      robot.setVelocity(vpRobot::ARTICULAR_FRAME, v);

      // Save velocities applied to the robot in the log file
      // v[0], v[1], v[2] correspond to joint translation velocities in m/s
      // v[3], v[4], v[5] correspond to joint rotation velocities in rad/s
      flog << v[0] << " " << v[1] << " " << v[2] << " " << v[3] << " " << v[4] << " " << v[5] << " ";

      // Get the measured joint velocities of the robot
      vpColVector qvel;
      robot.getVelocity(vpRobot::ARTICULAR_FRAME, qvel);
      // Save measured joint velocities of the robot in the log file:
      // - qvel[0], qvel[1], qvel[2] correspond to measured joint translation
      //   velocities in m/s
      // - qvel[3], qvel[4], qvel[5] correspond to measured joint rotation
      //   velocities in rad/s
      flog << qvel[0] << " " << qvel[1] << " " << qvel[2] << " " << qvel[3] << " " << qvel[4] << " " << qvel[5] << " ";

      // Get the measured joint positions of the robot
      vpColVector q;
      robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
      // Save measured joint positions of the robot in the log file
      // - q[0], q[1], q[2] correspond to measured joint translation
      //   positions in m
      // - q[3], q[4], q[5] correspond to measured joint rotation
      //   positions in rad
      flog << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << " " << q[4] << " " << q[5] << " ";

      // Save the feature error (s-s*) for the 4 feature points. For each feature
      // point, we have 2 errors (along the x and y axes). This error is
      // expressed in meters in the camera frame
      flog << (task.getError()).t() << std::endl;

      // Flush the display
      vpDisplay::flush(I);

      // std::cout << "|| s - s* || = " << ( task.getError() ).sumSquare() << std::endl;
    }

    std::cout << "Display task information: " << std::endl;
    task.print();
    flog.close(); // Close the log file
    return EXIT_SUCCESS;
  } catch (const vpException &e) {
    flog.close(); // Close the log file
    std::cout << "Catch an exception: " << e.getMessage() << std::endl;
    return EXIT_FAILURE;
  }
}

#else
int main()
{
  std::cout << "You do not have a Viper 850 robot connected to your computer..." << std::endl;
  return EXIT_SUCCESS;
}
#endif