Visual Servoing Platform version 3.6.0
tutorial-face-detector-live-threaded.cpp
#include <cstdlib>
#include <iostream>

#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpMutex.h>
#include <visp3/core/vpThread.h>
#include <visp3/core/vpTime.h>
#include <visp3/detection/vpDetectorFace.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/sensor/vpV4l2Grabber.h>

#if defined(HAVE_OPENCV_OBJDETECT) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEOIO) && (defined(VISP_HAVE_PTHREAD) || defined(_WIN32))

#include <opencv2/videoio.hpp>

// Shared vars
typedef enum { capture_waiting, capture_started, capture_stopped } t_CaptureState;
t_CaptureState s_capture_state = capture_waiting;
bool s_face_available = false;
#if defined(VISP_HAVE_V4L2)
vpImage<unsigned char> s_frame;
#elif defined(VISP_HAVE_OPENCV)
cv::Mat s_frame;
#endif
vpMutex s_mutex_capture;
vpMutex s_mutex_face;
vpRect s_face_bbox;
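// s_mutex_capture protects s_capture_state and s_frame; s_mutex_face protects
// s_face_available and s_face_bbox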

vpThread::Return captureFunction(vpThread::Args args)
{
#if defined(VISP_HAVE_V4L2)
  vpV4l2Grabber cap = *(static_cast<vpV4l2Grabber *>(args));
#elif defined(HAVE_OPENCV_VIDEOIO)
  cv::VideoCapture cap = *((cv::VideoCapture *)args);
#endif

  // If the image is larger than 640 by 480, we subsample
#if defined(VISP_HAVE_V4L2)
  vpImage<unsigned char> frame_;
#elif defined(HAVE_OPENCV_VIDEOIO)
  cv::Mat frame_;
#endif
  bool stop_capture_ = false;

  double start_time = vpTime::measureTimeSecond();
  while ((vpTime::measureTimeSecond() - start_time) < 30 && !stop_capture_) {
    // Capture in progress
    cap >> frame_; // get a new frame from camera

    // Update shared data
    {
      vpMutex::vpScopedLock lock(s_mutex_capture);
      if (s_capture_state == capture_stopped)
        stop_capture_ = true;
      else
        s_capture_state = capture_started;
      s_frame = frame_;
    }
  }
  {
    vpMutex::vpScopedLock lock(s_mutex_capture);
    s_capture_state = capture_stopped;
  }

  std::cout << "End of capture thread" << std::endl;
  return 0;
}

vpThread::Return displayFunction(vpThread::Args args)
{
  (void)args; // Avoid warning: unused parameter args
  vpImage<unsigned char> I_;

  t_CaptureState capture_state_;
  bool display_initialized_ = false;
  bool face_available_ = false;
  vpRect face_bbox_;
#if defined(VISP_HAVE_X11)
  vpDisplayX *d_ = NULL;
#elif defined(VISP_HAVE_GDI)
  vpDisplayGDI *d_ = NULL;
#endif

  do {
    s_mutex_capture.lock();
    capture_state_ = s_capture_state;
    s_mutex_capture.unlock();

    // Check if a frame is available
    if (capture_state_ == capture_started) {
      // Get the frame and convert it to a ViSP image used by the display class
      {
        vpMutex::vpScopedLock lock(s_mutex_capture);
#if defined(VISP_HAVE_V4L2)
        I_ = s_frame;
#elif defined(VISP_HAVE_OPENCV)
        vpImageConvert::convert(s_frame, I_);
#endif
      }

      // Check if we need to initialize the display with the first frame
      if (!display_initialized_) {
        // Initialize the display
#if defined(VISP_HAVE_X11)
        d_ = new vpDisplayX(I_);
        display_initialized_ = true;
#elif defined(VISP_HAVE_GDI)
        d_ = new vpDisplayGDI(I_);
        display_initialized_ = true;
#endif
      }

      // Display the image
      vpDisplay::display(I_);

      // Check if a face was detected
      {
        vpMutex::vpScopedLock lock(s_mutex_face);
        face_available_ = s_face_available;
        face_bbox_ = s_face_bbox;
      }
      if (face_available_) {
        // Access the face bounding box to display it
        vpDisplay::displayRectangle(I_, face_bbox_, vpColor::green, false, 4);
        face_available_ = false;
      }

      // Trigger end of acquisition with a mouse click
      vpDisplay::displayText(I_, 10, 10, "Click to exit...", vpColor::red);
      if (vpDisplay::getClick(I_, false)) {
        vpMutex::vpScopedLock lock(s_mutex_capture);
        s_capture_state = capture_stopped;
      }

      // Update the display
      vpDisplay::flush(I_);
    }
    else {
      vpTime::wait(2); // Sleep 2ms
    }
  } while (capture_state_ != capture_stopped);

#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
  delete d_;
#endif

  std::cout << "End of display thread" << std::endl;
  return 0;
}

vpThread::Return detectionFunction(vpThread::Args args)
{
  std::string opt_face_cascade_name = *((std::string *)args);

  vpDetectorFace face_detector_;
  face_detector_.setCascadeClassifierFile(opt_face_cascade_name);

  t_CaptureState capture_state_;
#if defined(VISP_HAVE_V4L2)
  vpImage<unsigned char> frame_;
#elif defined(VISP_HAVE_OPENCV)
  cv::Mat frame_;
#endif
  do {
    s_mutex_capture.lock();
    capture_state_ = s_capture_state;
    s_mutex_capture.unlock();

    // Check if a frame is available
    if (capture_state_ == capture_started) {
      // Backup the frame
      {
        vpMutex::vpScopedLock lock(s_mutex_capture);
        frame_ = s_frame;
      }

      // Detect faces
      bool face_found_ = face_detector_.detect(frame_);
      if (face_found_) {
        vpMutex::vpScopedLock lock(s_mutex_face);
        s_face_available = true;
        s_face_bbox = face_detector_.getBBox(0); // Get largest face bounding box
      }
    }
    else {
      vpTime::wait(2); // Sleep 2ms
    }
  } while (capture_state_ != capture_stopped);
  std::cout << "End of face detection thread" << std::endl;

  return 0;
}

int main(int argc, const char *argv[])
{
  std::string opt_face_cascade_name = "./haarcascade_frontalface_alt.xml";
  unsigned int opt_device = 0;
  unsigned int opt_scale = 2; // Default value is 2 in the constructor. Set it to 1 to avoid subsampling

  for (int i = 1; i < argc; i++) {
    if (std::string(argv[i]) == "--haar" && i + 1 < argc)
      opt_face_cascade_name = std::string(argv[i + 1]);
    else if (std::string(argv[i]) == "--device" && i + 1 < argc)
      opt_device = (unsigned int)atoi(argv[i + 1]);
    else if (std::string(argv[i]) == "--scale" && i + 1 < argc)
      opt_scale = (unsigned int)atoi(argv[i + 1]);
    else if (std::string(argv[i]) == "--help") {
      std::cout << "Usage: " << argv[0]
                << " [--haar <haarcascade xml filename>] [--device <camera device>]"
                   " [--scale <subsampling factor>] [--help]"
                << std::endl;
      return EXIT_SUCCESS;
    }
  }

  // Instantiate the capture
#if defined(VISP_HAVE_V4L2)
  vpV4l2Grabber cap;
  std::ostringstream device;
  device << "/dev/video" << opt_device;
  cap.setDevice(device.str());
  cap.setScale(opt_scale);
#elif defined(HAVE_OPENCV_VIDEOIO)
  cv::VideoCapture cap;
  cap.open(opt_device);
#if (VISP_HAVE_OPENCV_VERSION >= 0x030000)
  int width = (int)cap.get(cv::CAP_PROP_FRAME_WIDTH);
  int height = (int)cap.get(cv::CAP_PROP_FRAME_HEIGHT);
  cap.set(cv::CAP_PROP_FRAME_WIDTH, width / opt_scale);
  cap.set(cv::CAP_PROP_FRAME_HEIGHT, height / opt_scale);
#else
  int width = cap.get(CV_CAP_PROP_FRAME_WIDTH);
  int height = cap.get(CV_CAP_PROP_FRAME_HEIGHT);
  cap.set(CV_CAP_PROP_FRAME_WIDTH, width / opt_scale);
  cap.set(CV_CAP_PROP_FRAME_HEIGHT, height / opt_scale);
#endif
#endif

  // Start the threads
  vpThread thread_capture((vpThread::Fn)captureFunction, (vpThread::Args)&cap);
  vpThread thread_display((vpThread::Fn)displayFunction);
  vpThread thread_detection((vpThread::Fn)detectionFunction, (vpThread::Args)&opt_face_cascade_name);

  // Wait until the threads end
  thread_capture.join();
  thread_display.join();
  thread_detection.join();

  return EXIT_SUCCESS;
}

#else
int main()
{
#ifndef VISP_HAVE_OPENCV
  std::cout << "You should install OpenCV to make this example work..." << std::endl;
#elif !defined(_WIN32) && (defined(__unix__) || defined(__unix) || (defined(__APPLE__) && defined(__MACH__))) // UNIX
  std::cout << "You should enable pthread usage and rebuild ViSP..." << std::endl;
#else
  std::cout << "Multi-threading does not seem to be supported on this platform" << std::endl;
#endif
  return EXIT_SUCCESS;
}

#endif
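
// Typical invocation, assuming the built binary keeps the tutorial file name and the
// OpenCV haarcascade file has been copied into the working directory (paths may differ):
//   ./tutorial-face-detector-live-threaded --device 0 --scale 2 --haar ./haarcascade_frontalface_alt.xml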