Visual Servoing Platform version 3.6.0
tutorial-mb-generic-tracker-rgbd-realsense-json-settings.cpp
1
2#include <iostream>
3
4#include <visp3/core/vpConfig.h>
5
6#if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_OPENCV) && defined(VISP_HAVE_NLOHMANN_JSON)
7#include <visp3/core/vpDisplay.h>
8#include <visp3/core/vpIoTools.h>
9#include <visp3/gui/vpDisplayGDI.h>
10#include <visp3/gui/vpDisplayOpenCV.h>
11#include <visp3/gui/vpDisplayX.h>
12#include <visp3/mbt/vpMbGenericTracker.h>
13#include <visp3/sensor/vpRealSense2.h>
14
15#include <nlohmann/json.hpp>
16using json = nlohmann::json;
17
18
19int main(int argc, char *argv[])
20{
21 std::string config_file = "";
22 std::string model = "";
23 std::string init_file = "";
24
25 double proj_error_threshold = 25;
26
27 for (int i = 1; i < argc; i++) {
28 if (std::string(argv[i]) == "--config" && i + 1 < argc) {
29 config_file = std::string(argv[i + 1]);
30 }
31 else if (std::string(argv[i]) == "--model" && i + 1 < argc) {
32 model = std::string(argv[i + 1]);
33 }
34 else if (std::string(argv[i]) == "--init_file" && i + 1 < argc) {
35 init_file = std::string(argv[i + 1]);
36 }
37 else if (std::string(argv[i]) == "--proj_error_threshold" && i + 1 < argc) {
38 proj_error_threshold = std::atof(argv[i + 1]);
39 }
40 else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
41 std::cout << "Usage: \n"
42 << argv[0]
43 << "--config <settings.json>"
44 << " --model <object.cao>"
45 " --init_file <object.init>"
46 " [--proj_error_threshold <threshold between 0 and 90> (default: "
47 << proj_error_threshold
48 << ")]"
49 << std::endl;
50
51 std::cout << "\n** How to track a 4.2 cm width cube with manual initialization:\n"
52 << argv[0] << "--config model/cube/rgbd-tracker.json --model model/cube/cube.cao" << std::endl;
53 return EXIT_SUCCESS;
54 }
55 }
56
57 std::cout << "Config files: " << std::endl;
58 std::cout << " JSON config: "
59 << "\"" << config_file << "\"" << std::endl;
60 std::cout << " Model: "
61 << "\"" << model << "\"" << std::endl;
62 std::cout << " Init file: "
63 << "\"" << init_file << "\"" << std::endl;
64
65 if (config_file.empty()) {
66 std::cout << "No JSON configuration was provided!" << std::endl;
67 return EXIT_FAILURE;
68 }
70 vpRealSense2 realsense;
71 int width = 640, height = 480;
72 int fps = 30;
73 rs2::config config;
74 config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, fps);
75 config.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, fps);
76
77 try {
78 realsense.open(config);
79 }
80 catch (const vpException &e) {
81 std::cout << "Catch an exception: " << e.what() << std::endl;
82 std::cout << "Check if the Realsense camera is connected..." << std::endl;
83 return EXIT_SUCCESS;
84 }
85
86 vpCameraParameters cam_color =
87 realsense.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion);
88 vpCameraParameters cam_depth =
89 realsense.getCameraParameters(RS2_STREAM_DEPTH, vpCameraParameters::perspectiveProjWithoutDistortion);
90
91 std::cout << "Sensor internal camera parameters for color camera: " << cam_color << std::endl;
92 std::cout << "Sensor internal camera parameters for depth camera: " << cam_depth << std::endl;
93
94 vpImage<vpRGBa> I_color(height, width); // acquired by the realsense
95 vpImage<unsigned char> I_gray(height, width); // Fed to the tracker if we use edge or KLT features
96 vpImage<unsigned char> I_depth(height, width); // Depth histogram used for display
97 vpImage<uint16_t> I_depth_raw(height, width); // Raw depth acquired by the realsense
98 std::vector<vpColVector> pointcloud; // fed to the tracker
100
101 vpHomogeneousMatrix depth_M_color = realsense.getTransformation(RS2_STREAM_COLOR, RS2_STREAM_DEPTH);
102 std::map<std::string, vpHomogeneousMatrix> mapOfCameraTransformations;
103 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
104 std::map<std::string, std::string> mapOfInitFiles;
105 std::map<std::string, const std::vector<vpColVector> *> mapOfPointclouds;
106 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
107 std::map<std::string, vpCameraParameters> mapOfCameraIntrinsics;
108
110 vpMbGenericTracker tracker;
111 tracker.loadConfigFile(config_file);
113 if (model.empty() && init_file.empty()) {
114 std::ifstream config(config_file);
115 const json j = json::parse(config);
116 config.close();
117 if (j.contains("model")) {
118 model = j["model"];
119 }
120 else {
121 std::cerr << "No model was provided in either JSON file or arguments" << std::endl;
122 return EXIT_FAILURE;
123 }
124 }
125 if (init_file.empty()) {
126 const std::string parentname = vpIoTools::getParent(model);
127 init_file = (parentname.empty() ? "" : (parentname + "/")) + vpIoTools::getNameWE(model) + ".init";
128 }
129
131 std::string color_key = "", depth_key = "";
132 for (const auto &tracker_type : tracker.getCameraTrackerTypes()) {
133 std::cout << "tracker key == " << tracker_type.first << std::endl;
134 // Initialise for color features
135 if (tracker_type.second & vpMbGenericTracker::EDGE_TRACKER || tracker_type.second & vpMbGenericTracker::KLT_TRACKER) {
136 color_key = tracker_type.first;
137 mapOfImages[color_key] = &I_gray;
138 mapOfInitFiles[color_key] = init_file;
139 mapOfWidths[color_key] = width;
140 mapOfHeights[color_key] = height;
141 mapOfCameraIntrinsics[color_key] = cam_color;
142 }
143 // Initialize for depth features
144 if (tracker_type.second & vpMbGenericTracker::DEPTH_DENSE_TRACKER || tracker_type.second & vpMbGenericTracker::DEPTH_NORMAL_TRACKER) {
145 depth_key = tracker_type.first;
146 mapOfImages[depth_key] = &I_depth;
147 mapOfWidths[depth_key] = width;
148 mapOfHeights[depth_key] = height;
149 mapOfCameraIntrinsics[depth_key] = cam_depth;
150 mapOfCameraTransformations[depth_key] = depth_M_color;
151 mapOfPointclouds[depth_key] = &pointcloud;
152 }
153 }
154 const bool use_depth = !depth_key.empty();
155 const bool use_color = !color_key.empty();
158 if (tracker.getNbPolygon() == 0) { // Not already loaded by JSON
159 tracker.loadModel(model);
160 }
162
164 std::cout << "Updating configuration with parameters provided by RealSense SDK..." << std::endl;
165 tracker.setCameraParameters(mapOfCameraIntrinsics);
166 if (use_color && use_depth) {
167 tracker.setCameraTransformationMatrix(mapOfCameraTransformations);
168 }
170 unsigned int _posx = 100, _posy = 50;
171
172#ifdef VISP_HAVE_X11
173 vpDisplayX d1, d2;
174#elif defined(VISP_HAVE_GDI)
175 vpDisplayGDI d1, d2;
176#elif defined(HAVE_OPENCV_HIGHGUI)
177 vpDisplayOpenCV d1, d2;
178#endif
179 if (use_color)
180 d1.init(I_gray, _posx, _posy, "Color stream");
181 if (use_depth)
182 d2.init(I_depth, _posx + I_gray.getWidth() + 10, _posy, "Depth stream");
183
184 while (true) {
185 realsense.acquire((unsigned char *)I_color.bitmap, (unsigned char *)I_depth_raw.bitmap, NULL, NULL);
186
187 if (use_color) {
188 vpImageConvert::convert(I_color, I_gray);
189 vpDisplay::display(I_gray);
190 vpDisplay::displayText(I_gray, 20, 20, "Click when ready.", vpColor::red);
191 vpDisplay::flush(I_gray);
192
193 if (vpDisplay::getClick(I_gray, false)) {
194 break;
195 }
196 }
197 if (use_depth) {
198 vpImageConvert::createDepthHistogram(I_depth_raw, I_depth);
199
200 vpDisplay::display(I_depth);
201 vpDisplay::displayText(I_depth, 20, 20, "Click when ready.", vpColor::red);
202 vpDisplay::flush(I_depth);
203
204 if (vpDisplay::getClick(I_depth, false)) {
205 break;
206 }
207 }
208 }
209
210 tracker.setProjectionErrorComputation(true);
212 tracker.initClick(mapOfImages, mapOfInitFiles, true);
215 std::vector<double> times_vec;
216 try {
217 bool quit = false;
218 double loop_t = 0;
219 vpHomogeneousMatrix cMo;
220
221 while (!quit) {
222 double t = vpTime::measureTimeMs();
223 bool tracking_failed = false;
224
225 // Acquire images and update tracker input data
226 realsense.acquire((unsigned char *)I_color.bitmap, (unsigned char *)I_depth_raw.bitmap, &pointcloud, NULL, NULL);
227
228 if (use_color) {
229 vpImageConvert::convert(I_color, I_gray);
230 vpDisplay::display(I_gray);
231 }
232 if (use_depth) {
233 vpImageConvert::createDepthHistogram(I_depth_raw, I_depth);
234 vpDisplay::display(I_depth);
235 }
236
237 if (use_color && use_depth) {
238 mapOfImages[color_key] = &I_gray;
239 mapOfPointclouds[depth_key] = &pointcloud;
240 mapOfWidths[depth_key] = width;
241 mapOfHeights[depth_key] = height;
242 }
243 else if (use_color) {
244 mapOfImages[color_key] = &I_gray;
245 }
246 else if (use_depth) {
247 mapOfPointclouds[depth_key] = &pointcloud;
248 }
249
250 // Run the tracker
251 try {
252 if (use_color && use_depth) {
253 tracker.track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights);
254 }
255 else if (use_color) {
256 tracker.track(I_gray);
257 }
258 else if (use_depth) {
259 tracker.track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights);
260 }
261 }
262 catch (const vpException &e) {
263 std::cout << "Tracker exception: " << e.getStringMessage() << std::endl;
264 tracking_failed = true;
265 }
266
267 // Get object pose
268 cMo = tracker.getPose();
269
270 // Check tracking errors
271 double proj_error = 0;
272 if (tracker.getTrackerType() & vpMbGenericTracker::EDGE_TRACKER) {
273 // Projection error already computed by the tracker for edge features
274 proj_error = tracker.getProjectionError();
275 }
276 else {
277 proj_error = tracker.computeCurrentProjectionError(I_gray, cMo, cam_color);
278 }
279
280 if (proj_error > proj_error_threshold) {
281 std::cout << "Tracker needs to restart (projection error detected: " << proj_error << ")" << std::endl;
282 }
283
284 // Display tracking results
285 if (!tracking_failed) {
286 // Turn display features on
287 tracker.setDisplayFeatures(true);
288
289 if (use_color && use_depth) {
290 tracker.display(I_gray, I_depth, cMo, depth_M_color * cMo, cam_color, cam_depth, vpColor::red, 3);
291 vpDisplay::displayFrame(I_gray, cMo, cam_color, 0.05, vpColor::none, 3);
292 vpDisplay::displayFrame(I_depth, depth_M_color * cMo, cam_depth, 0.05, vpColor::none, 3);
293 }
294 else if (use_color) {
295 tracker.display(I_gray, cMo, cam_color, vpColor::red, 3);
296 vpDisplay::displayFrame(I_gray, cMo, cam_color, 0.05, vpColor::none, 3);
297 }
298 else if (use_depth) {
299 tracker.display(I_depth, cMo, cam_depth, vpColor::red, 3);
300 vpDisplay::displayFrame(I_depth, cMo, cam_depth, 0.05, vpColor::none, 3);
301 }
302
303 { // Display total number of features
304 std::stringstream ss;
305 ss << "Nb features: " << tracker.getError().size();
306 vpDisplay::displayText(I_gray, I_gray.getHeight() - 50, 20, ss.str(), vpColor::red);
307 }
308 { // Display number of feature per type
309 std::stringstream ss;
310 ss << "Features: edges " << tracker.getNbFeaturesEdge() << ", klt " << tracker.getNbFeaturesKlt()
311 << ", depth dense " << tracker.getNbFeaturesDepthDense() << ", depth normal" << tracker.getNbFeaturesDepthNormal();
312 vpDisplay::displayText(I_gray, I_gray.getHeight() - 30, 20, ss.str(), vpColor::red);
313 }
314 }
315
316 std::stringstream ss;
317 loop_t = vpTime::measureTimeMs() - t;
318 times_vec.push_back(loop_t);
319 ss << "Loop time: " << loop_t << " ms";
320
321 vpMouseButton::vpMouseButtonType button;
322 if (use_color) {
323 vpDisplay::displayText(I_gray, 20, 20, ss.str(), vpColor::red);
324 vpDisplay::displayText(I_gray, 35, 20, "Right click: quit", vpColor::red);
325
326 vpDisplay::flush(I_gray);
327
328 if (vpDisplay::getClick(I_gray, button, false)) {
329 if (button == vpMouseButton::button3) {
330 quit = true;
331 }
332 }
333 }
334 if (use_depth) {
335 vpDisplay::displayText(I_depth, 20, 20, ss.str(), vpColor::red);
336 vpDisplay::displayText(I_depth, 40, 20, "Click to quit", vpColor::red);
337 vpDisplay::flush(I_depth);
338
339 if (vpDisplay::getClick(I_depth, false)) {
340 quit = true;
341 }
342 }
343
344 }
345 }
346 catch (const vpException &e) {
347 std::cout << "Caught an exception: " << e.what() << std::endl;
348 }
349 // Show tracking performance
350 if (!times_vec.empty()) {
351 std::cout << "\nProcessing time, Mean: " << vpMath::getMean(times_vec)
352 << " ms ; Median: " << vpMath::getMedian(times_vec) << " ; Std: " << vpMath::getStdev(times_vec) << " ms"
353 << std::endl;
354 }
356 return EXIT_SUCCESS;
357}
358#elif defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_OPENCV)
359int main()
360{
361 std::cout << "Install the JSON 3rd party library (Nlohmann JSON), reconfigure VISP and build again" << std::endl;
362 return EXIT_SUCCESS;
363}
364#elif defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_NLOHMANN_JSON)
365int main()
366{
367 std::cout << "Install OpenCV, reconfigure VISP and build again" << std::endl;
368 return EXIT_SUCCESS;
369}
370#else
371int main()
372{
373 std::cout << "Install librealsense2 3rd party, configure and build ViSP again to use this example" << std::endl;
374 return EXIT_SUCCESS;
375}
376#endif
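The file given with --config is a JSON settings file that vpMbGenericTracker::loadConfigFile() consumes and that this example additionally inspects for an optional top-level "model" entry pointing to the CAO model (see the model.empty() fallback in the listing). The sketch below is a minimal, hypothetical example of such a file: only the "model" key is grounded in the code above; a real settings file also carries the per-camera tracker configuration (feature types, camera intrinsics, thresholds), whose exact key names are defined by ViSP's JSON schema and are omitted here.

{
  "model": "model/cube/cube.cao"
}

With a complete settings file saved, for instance, as model/cube/rgbd-tracker.json, the program can be launched as shown in the --help message above, e.g. with --config model/cube/rgbd-tracker.json alone: the --model argument may then be omitted, since the model path is read from the JSON file, and the .init file used for the click-based initialization defaults to the model file name with an .init extension.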
unsigned int size() const
Return the number of elements of the 2D array.
Definition vpArray2D.h:292
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
static const vpColor red
Definition vpColor.h:211
static const vpColor none
Definition vpColor.h:223
Display for windows using GDI (available on any windows 32 platform).
The vpDisplayOpenCV allows to display image using the OpenCV library. Thus to enable this class OpenCV should be installed.
Use the X11 console to display images on unix-like OS. Thus to enable this class X11 should be installed.
Definition vpDisplayX.h:132
void init(vpImage< unsigned char > &I, int win_x=-1, int win_y=-1, const std::string &win_title="")
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:59
const std::string & getStringMessage() const
const char * what() const
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void createDepthHistogram(const vpImage< uint16_t > &src_depth, vpImage< vpRGBa > &dest_rgba)
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Definition of the vpImage class member functions.
Definition vpImage.h:135
static std::string getNameWE(const std::string &pathname)
static std::string getParent(const std::string &pathname)
static double getMedian(const std::vector< double > &v)
Definition vpMath.cpp:314
static double getStdev(const std::vector< double > &v, bool useBesselCorrection=false)
Definition vpMath.cpp:345
static double getMean(const std::vector< double > &v)
Definition vpMath.cpp:294
Real-time 6D object pose tracking using its CAD model.
virtual void setCameraParameters(const vpCameraParameters &camera)
virtual void getPose(vpHomogeneousMatrix &cMo) const
virtual void setDisplayFeatures(bool displayF)
virtual int getTrackerType() const
virtual void setProjectionErrorComputation(const bool &flag)
virtual unsigned int getNbFeaturesEdge() const
virtual std::map< std::string, int > getCameraTrackerTypes() const
virtual unsigned int getNbFeaturesKlt() const
virtual unsigned int getNbFeaturesDepthDense() const
virtual unsigned int getNbPolygon() const
virtual unsigned int getNbFeaturesDepthNormal() const
virtual void initClick(const vpImage< unsigned char > &I1, const vpImage< unsigned char > &I2, const std::string &initFile1, const std::string &initFile2, bool displayHelp=false, const vpHomogeneousMatrix &T1=vpHomogeneousMatrix(), const vpHomogeneousMatrix &T2=vpHomogeneousMatrix())
virtual double computeCurrentProjectionError(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo, const vpCameraParameters &_cam)
virtual void loadConfigFile(const std::string &configFile, bool verbose=true)
virtual void setCameraTransformationMatrix(const std::string &cameraName, const vpHomogeneousMatrix &cameraTransformationMatrix)
virtual void loadModel(const std::string &modelFile, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix())
virtual vpColVector getError() const
virtual void display(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, const vpColor &col, unsigned int thickness=1, bool displayFullModel=false)
virtual void track(const vpImage< unsigned char > &I)
virtual double getProjectionError() const
void acquire(vpImage< unsigned char > &grey, double *ts=NULL)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
bool open(const rs2::config &cfg=rs2::config())
vpHomogeneousMatrix getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index=-1) const
VISP_EXPORT double measureTimeMs()
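For reference, the classes documented above can also be assembled into a much shorter color-only tracking loop. The following is a simplified, hypothetical sketch rather than part of the tutorial: it drops the depth stream, the projection-error check and the exception handling, hard-codes placeholder file names (rgb-tracker.json, cube.cao, cube.init), assumes the JSON settings describe a single color camera with edge/KLT features, assumes an X11 build (swap in vpDisplayGDI or vpDisplayOpenCV otherwise), and uses the single-image overloads of acquire(), initClick(), track(), getPose() and display() from the ViSP API.

#include <cstdlib>
#include <visp3/core/vpImageConvert.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/mbt/vpMbGenericTracker.h>
#include <visp3/sensor/vpRealSense2.h>

int main()
{
  // Open only the color stream of the RealSense camera (640x480 at 30 Hz)
  vpRealSense2 realsense;
  rs2::config config;
  config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
  realsense.open(config);

  vpImage<vpRGBa> I_color(480, 640);
  vpImage<unsigned char> I_gray(480, 640);
  vpCameraParameters cam =
      realsense.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion);

  // Configure the tracker from the JSON settings file (placeholder path), then
  // override the intrinsics with the ones reported by the sensor and load the CAD model
  vpMbGenericTracker tracker;
  tracker.loadConfigFile("rgb-tracker.json");
  tracker.setCameraParameters(cam);
  tracker.loadModel("cube.cao");
  tracker.setDisplayFeatures(true);

  // Show the first image and initialize the pose by clicking the points listed in cube.init
  vpDisplayX d;
  d.init(I_gray, 100, 50, "Color stream");
  realsense.acquire(I_color);
  vpImageConvert::convert(I_color, I_gray);
  vpDisplay::display(I_gray);
  vpDisplay::flush(I_gray);
  tracker.initClick(I_gray, "cube.init", true);

  // Track until a click occurs in the window
  vpHomogeneousMatrix cMo;
  while (true) {
    realsense.acquire(I_color);
    vpImageConvert::convert(I_color, I_gray);
    vpDisplay::display(I_gray);
    tracker.track(I_gray);
    tracker.getPose(cMo);
    tracker.display(I_gray, cMo, cam, vpColor::red, 3);
    vpDisplay::displayFrame(I_gray, cMo, cam, 0.05, vpColor::none, 3);
    vpDisplay::flush(I_gray);
    if (vpDisplay::getClick(I_gray, false)) {
      break;
    }
  }
  return EXIT_SUCCESS;
}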