Visual Servoing Platform version 3.6.0
servoPioneerPanSegment3D.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * IBVS on Pioneer P3DX mobile platform
 *
 *****************************************************************************/
#include <iostream>

#include <visp3/core/vpConfig.h>

#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpVelocityTwistMatrix.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayX.h> // Should be included after vpRobotPioneer.h
#include <visp3/gui/vpPlot.h>
#include <visp3/robot/vpPioneerPan.h>
#include <visp3/robot/vpRobotBiclops.h>
#include <visp3/robot/vpRobotPioneer.h> // Include first to avoid build issues with Status, None, isfinite
#include <visp3/sensor/vp1394CMUGrabber.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/sensor/vpV4l2Grabber.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeatureSegment.h>
#include <visp3/vs/vpServo.h>

#define USE_REAL_ROBOT
#define USE_PLOTTER
#undef VISP_HAVE_V4L2 // To use a firewire camera

#if defined(VISP_HAVE_PIONEER) && defined(VISP_HAVE_BICLOPS)
int main(int argc, char **argv)
{
#if defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_CMU1394)
#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
  try {
    vpImage<unsigned char> I; // Create a gray level image container
    double lambda = 0.1;
    // Scale parameter used to estimate the depth Z of the blob from its
    // surface
    // double coef = 0.9/14.85; // At 0.9m, the blob has a surface of 14.85
    // (Logitech sphere)
    double coef = 1.2 / 13.0; // At 1m, the blob has a surface of 11.3 (AVT Pike 032C)
    double L = 0.21;          // 3D horizontal segment length
    double Z_d = 0.8;         // Desired distance along Z between camera and segment
    bool normalized = true;   // normalized segment features are used

    // Warning: To have a non singular task of rank 3, Y_d should be different
    // from 0 so that the optical axis doesn't intersect the horizontal
    // segment
    double Y_d = -.11; // Desired distance along Y between camera and segment.
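    // The servo loop below checks task.getTaskRank() at each iteration and
    // prints a warning whenever the task rank drops below 3.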
    vpColVector qm(2); // Measured head position
    qm = 0;
    double qm_pan = 0; // Measured pan position (tilt is not handled in this example)

#ifdef USE_REAL_ROBOT
    // Initialize the biclops head

    vpRobotBiclops biclops("/usr/share/BiclopsDefault.cfg");
    biclops.setDenavitHartenbergModel(vpBiclops::DH1);

    // Move to the initial position
    vpColVector q(2);

    q = 0;
    // q[0] = vpMath::rad(63);
    // q[1] = vpMath::rad(12); // introduce a tilt angle to compensate camera
    // sphere tilt so that the camera is parallel to the plane

    biclops.setRobotState(vpRobot::STATE_POSITION_CONTROL);
    biclops.setPosition(vpRobot::ARTICULAR_FRAME, q);
    // biclops.setPositioningVelocity(50);
    biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
    qm_pan = qm[0];

    // Now the head will be controlled in velocity
    biclops.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

    // Initialize the pioneer robot
    vpRobotPioneer pioneer;
    ArArgumentParser parser(&argc, argv);
    parser.loadDefaultArguments();

    // ArRobotConnector connects to the robot, gets some initial data from it
    // such as type and name, and then loads parameter files for this robot.
    ArRobotConnector robotConnector(&parser, &pioneer);
    if (!robotConnector.connectRobot()) {
      ArLog::log(ArLog::Terse, "Could not connect to the pioneer robot.");
      if (parser.checkHelpAndWarnUnparsed()) {
        Aria::logOptions();
        Aria::exit(1);
      }
    }
    if (!Aria::parseArgs()) {
      Aria::logOptions();
      Aria::shutdown();
      return EXIT_FAILURE;
    }

    pioneer.useSonar(false); // disable the sonar device usage

    // Wait 3 sec to be sure that the low level Aria thread used to control
    // the robot is started. Without this delay we experienced a delay
    // (around 2.2 sec) between the velocity sent to the robot and the
    // velocity that is really applied to the wheels.
    sleep(3);

    std::cout << "Pioneer robot connected" << std::endl;
#endif

    vpPioneerPan robot_pan; // Generic robot that computes the velocities for
                            // the pioneer and the biclops head

    // Camera parameters. In this experiment we don't need a precise
    // calibration of the camera
    vpCameraParameters cam;

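    // In each grabber branch below, initPersProjWithoutDistortion(px, py, u0, v0)
    // fills cam with pinhole intrinsics (px, py: focal length expressed in pixels,
    // u0, v0: principal point), without any distortion model.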
// Create the camera framegrabber
#if defined(VISP_HAVE_V4L2)
    // Create a grabber based on v4l2 third party lib (for usb cameras under
    // Linux)
    vpV4l2Grabber g;
    g.setScale(1);
    g.setInput(0);
    g.setDevice("/dev/video1");
    g.open(I);
    // Logitech sphere parameters
    cam.initPersProjWithoutDistortion(558, 555, 312, 210);
#elif defined(VISP_HAVE_DC1394)
    // Create a grabber based on libdc1394-2.x third party lib (for firewire
    // cameras under Linux)
    vp1394TwoGrabber g(false);
    // AVT Pike 032C parameters
    cam.initPersProjWithoutDistortion(800, 795, 320, 216);
#elif defined(VISP_HAVE_CMU1394)
    // Create a grabber based on CMU 1394 third party lib (for firewire
    // cameras under Windows)
    vp1394CMUGrabber g;
    g.setVideoMode(0, 5); // 640x480 MONO8
    g.setFramerate(4);    // 30 Hz
    g.open(I);
    // AVT Pike 032C parameters
    cam.initPersProjWithoutDistortion(800, 795, 320, 216);
#endif

    // Acquire an image from the grabber
    g.acquire(I);

// Create an image viewer
#if defined(VISP_HAVE_X11)
    vpDisplayX d(I, 10, 10, "Current frame");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI d(I, 10, 10, "Current frame");
#endif
    vpDisplay::display(I);
    vpDisplay::flush(I);

    // The 3D segment consists of two horizontal dots
    vpDot2 dot[2];
    for (int i = 0; i < 2; i++) {
      dot[i].setGraphics(true);
      dot[i].setComputeMoments(true);
      dot[i].setEllipsoidShapePrecision(0.);       // to track a blob without any constraint on the shape
      dot[i].setGrayLevelPrecision(0.9);           // to set the blob gray level bounds for binarisation
      dot[i].setEllipsoidBadPointsPercentage(0.5); // to accept 50% of bad inner
                                                   // and outside points, i.e.
                                                   // points with a bad gray level
      dot[i].initTracking(I); // wait for a user click on each blob to initialize the tracker
      vpDisplay::flush(I);
    }

    vpServo task;
    task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
    task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);
    task.setLambda(lambda);
    vpVelocityTwistMatrix cVe; // keep to identity
    cVe = robot_pan.get_cVe();
    task.set_cVe(cVe);

    std::cout << "cVe: \n" << cVe << std::endl;

    vpMatrix eJe;

    // Update the robot jacobian that depends on the pan position
    robot_pan.set_eJe(qm_pan);
    // Get the robot jacobian
    eJe = robot_pan.get_eJe();
    task.set_eJe(eJe);
    std::cout << "eJe: \n" << eJe << std::endl;

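    // With this EYEINHAND_L_cVe_eJe configuration, computeControlLaw() applies the
    // classical law qdot = -lambda * (L * cVe * eJe)^+ * (s - s*), where L is the
    // interaction matrix (here evaluated at the desired features), cVe the camera
    // to end-effector twist transformation and eJe the robot Jacobian.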
    // Define a 3D horizontal segment and its coordinates in the image plane
    vpPoint P[2];
    P[0].setWorldCoordinates(-L / 2, 0, 0);
    P[1].setWorldCoordinates(L / 2, 0, 0);
    // Define the desired camera position
    vpHomogeneousMatrix cMo(0, Y_d, Z_d, 0, 0,
                            0); // Here we are in front of the segment
    for (int i = 0; i < 2; i++) {
      P[i].changeFrame(cMo);
      P[i].project(); // Here the x,y parameters obtained by perspective
                      // projection are computed
    }

    // Estimate the depth of the segment extremity points
    double surface[2];
    double Z[2]; // Depth of the segment points
    for (int i = 0; i < 2; i++) {
      // Surface of the blob estimated from the image moment m00 and converted
      // in meters
      surface[i] = 1. / sqrt(dot[i].m00 / (cam.get_px() * cam.get_py()));

      // Initial depth of the blob
      Z[i] = coef * surface[i];
    }
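    // In other words Z is assumed proportional to 1/sqrt(m00): a blob whose
    // apparent diameter doubles has a moment m00 four times larger, so the
    // estimated depth is divided by two. The constant coef is calibrated from
    // one measurement at a known distance (see its definition above).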

    // Use here a feature segment builder
    vpFeatureSegment s_segment(normalized),
      s_segment_d(normalized); // From the segment feature we use the xc, l and alpha features
    vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
    s_segment.setZ1(Z[0]);
    s_segment.setZ2(Z[1]);
    // Set the desired feature
    vpFeatureBuilder::create(s_segment_d, P[0], P[1]);
    s_segment_d.setZ1(P[0].get_Z()); // Desired depth
    s_segment_d.setZ2(P[1].get_Z());

    task.addFeature(s_segment, s_segment_d,
                    vpFeatureSegment::selectXc() | vpFeatureSegment::selectL() | vpFeatureSegment::selectAlpha());

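    // The three selected features (xc, l, alpha in their normalized form) match
    // the three controlled degrees of freedom returned by computeControlLaw():
    // the translational velocity vx and rotational velocity wz of the Pioneer,
    // plus the pan velocity of the biclops head. This is why a task rank of 3
    // is expected below.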
#ifdef USE_PLOTTER
    // Create a window (500 by 500) at position (700, 10) with two graphics
    vpPlot graph(2, 500, 500, 700, 10, "Curves...");

    // Each of the two graphics contains 3 curves
    graph.initGraph(0, 3);
    graph.initGraph(1, 3);
    graph.setTitle(0, "Velocities");
    graph.setTitle(1, "Error s-s*");
    graph.setLegend(0, 0, "vx");
    graph.setLegend(0, 1, "wz");
    graph.setLegend(0, 2, "w_pan");
    graph.setLegend(1, 0, "xm/l");
    graph.setLegend(1, 1, "1/l");
    graph.setLegend(1, 2, "alpha");
#endif

    vpColVector v; // computed velocities: vx, wz and the pan velocity

    try {
      unsigned int iter = 0;
      while (1) {
#ifdef USE_REAL_ROBOT
        // Get the new pan position
        biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
#endif
        qm_pan = qm[0];

        // Acquire a new image
        g.acquire(I);
        // Set the image as background of the viewer
        vpDisplay::display(I);

        // Display the desired position of the segment
        for (int i = 0; i < 2; i++)
          P[i].display(I, cam, vpColor::red, 3);

        // Do the blob tracking
        for (int i = 0; i < 2; i++)
          dot[i].track(I);

        for (int i = 0; i < 2; i++) {
          // Surface of the blob estimated from the image moment m00 and
          // converted in meters
          surface[i] = 1. / sqrt(dot[i].m00 / (cam.get_px() * cam.get_py()));

          // Updated depth of the blob
          Z[i] = coef * surface[i];
        }

        // Update the features
        vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
        // Update the depth of the points. Useful only if the current
        // interaction matrix is used, i.e. when
        // task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE) is set
        s_segment.setZ1(Z[0]);
        s_segment.setZ2(Z[1]);

        robot_pan.get_cVe(cVe);
        task.set_cVe(cVe);

        // Update the robot jacobian that depends on the pan position
        robot_pan.set_eJe(qm_pan);
        // Get the robot jacobian
        eJe = robot_pan.get_eJe();
        // Update the jacobian that will be used to compute the control law
        task.set_eJe(eJe);

        // Compute the control law. Velocities are computed in the mobile
        // robot reference frame
        v = task.computeControlLaw();

        // std::cout << "-----" << std::endl;
        // std::cout << "v: " << v.t() << std::endl;
        // std::cout << "error: " << task.getError().t() << std::endl;
        // std::cout << "L:\n " << task.getInteractionMatrix() << std::endl;
        // std::cout << "eJe:\n " << task.get_eJe() << std::endl;
        // std::cout << "cVe:\n " << task.get_cVe() << std::endl;
        // std::cout << "L_cVe_eJe:\n" << task.getInteractionMatrix() * task.get_cVe() * task.get_eJe() << std::endl;
        // task.print();
        if (task.getTaskRank() != 3)
          std::cout << "Warning: task is of rank " << task.getTaskRank() << std::endl;

#ifdef USE_PLOTTER
        graph.plot(0, iter, v);               // plot velocities applied to the robot
        graph.plot(1, iter, task.getError()); // plot error vector
#endif

#ifdef USE_REAL_ROBOT
        // Send the velocity to the robot
        vpColVector v_pioneer(2); // vx, wz
        v_pioneer[0] = v[0];
        v_pioneer[1] = v[1];
        vpColVector v_biclops(2); // qdot pan and tilt
        v_biclops[0] = v[2];
        v_biclops[1] = 0;

        std::cout << "Send velocity to the pioneer: " << v_pioneer[0] << " m/s " << vpMath::deg(v_pioneer[1])
                  << " deg/s" << std::endl;
        std::cout << "Send velocity to the biclops head: " << vpMath::deg(v_biclops[0]) << " deg/s" << std::endl;

        pioneer.setVelocity(vpRobot::REFERENCE_FRAME, v_pioneer);
        biclops.setVelocity(vpRobot::ARTICULAR_FRAME, v_biclops);
#endif

        // Draw a vertical line which corresponds to the desired x coordinate
        // of the dot cog
        vpDisplay::displayLine(I, 0, cam.get_u0(), 479, cam.get_u0(), vpColor::red);
        vpDisplay::flush(I);

        // A click in the viewer to exit
        if (vpDisplay::getClick(I, false))
          break;

        iter++;
        // break;
      }
    } catch (...) {
    }

#ifdef USE_REAL_ROBOT
    std::cout << "Ending robot thread..." << std::endl;
    pioneer.stopRunning();

    // wait for the thread to stop
    pioneer.waitForRunExit();
#endif

    // Print a summary of the servo task before exiting
    task.print();
    return EXIT_SUCCESS;
  } catch (const vpException &e) {
    std::cout << "Catch an exception: " << e << std::endl;
    return EXIT_FAILURE;
  }
#endif
#endif
}
#else
int main()
{
  std::cout << "ViSP is not able to control the Pioneer robot" << std::endl;
  return EXIT_SUCCESS;
}
#endif
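Note: as the preprocessor guards above indicate, this example is only built when ViSP is configured with both Pioneer (Aria) and Biclops support (VISP_HAVE_PIONEER and VISP_HAVE_BICLOPS), and running the servo loop additionally requires a supported framegrabber (libdc1394, Video4Linux2 or CMU1394) together with an X11 or GDI display.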