ViSP
 All Classes Functions Variables Enumerations Enumerator Friends Groups Pages
servoPioneerPanSegment3D.cpp
1 /****************************************************************************
2  *
3  * $Id: servoPioneerPanSegment3D.cpp 4056 2013-01-05 13:04:42Z fspindle $
4  *
5  * This file is part of the ViSP software.
6  * Copyright (C) 2005 - 2013 by INRIA. All rights reserved.
7  *
8  * This software is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU General Public License
10  * ("GPL") version 2 as published by the Free Software Foundation.
11  * See the file LICENSE.txt at the root directory of this source
12  * distribution for additional information about the GNU GPL.
13  *
14  * For using ViSP with software that can not be combined with the GNU
15  * GPL, please contact INRIA about acquiring a ViSP Professional
16  * Edition License.
17  *
18  * See http://www.irisa.fr/lagadic/visp/visp.html for more information.
19  *
20  * This software was developed at:
21  * INRIA Rennes - Bretagne Atlantique
22  * Campus Universitaire de Beaulieu
23  * 35042 Rennes Cedex
24  * France
25  * http://www.irisa.fr/lagadic
26  *
27  * If you have questions regarding the use of this file, please contact
28  * INRIA at visp@inria.fr
29  *
30  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
31  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
32  *
33  *
34  * Description:
35  * IBVS on Pioneer P3DX mobile platform
36  *
37  * Authors:
38  * Fabien Spindler
39  *
40  *****************************************************************************/
41 #include <iostream>
42 
43 #include <visp/vpConfig.h>
44 
45 #include <visp/vpRobotPioneer.h> // Include before vpDisplayX to avoid build issues
46 #include <visp/vpRobotBiclops.h>
47 #include <visp/vpCameraParameters.h>
48 #include <visp/vpDisplayGDI.h>
49 #include <visp/vpDisplayX.h>
50 #include <visp/vpDot2.h>
51 #include <visp/vpFeatureBuilder.h>
52 #include <visp/vpFeatureSegment.h>
53 #include <visp/vpHomogeneousMatrix.h>
54 #include <visp/vpImage.h>
55 #include <visp/vp1394TwoGrabber.h>
56 #include <visp/vp1394CMUGrabber.h>
57 #include <visp/vpV4l2Grabber.h>
58 #include <visp/vpPioneerPan.h>
59 #include <visp/vpPlot.h>
60 #include <visp/vpServo.h>
61 #include <visp/vpVelocityTwistMatrix.h>
62 
63 #define USE_REAL_ROBOT
64 #define USE_PLOTTER
65 #undef VISP_HAVE_V4L2 // To use a firewire camera
66 
84 #if defined(VISP_HAVE_PIONEER) && defined(VISP_HAVE_BICLOPS)
85 int main(int argc, char **argv)
86 {
87 #if defined(VISP_HAVE_DC1394_2) || defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_CMU1394)
88 #if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
89  vpImage<unsigned char> I; // Create a gray level image container
90  double lambda = 0.1; // Visual servo gain used by task.setLambda() below
91  // Scale parameter used to estimate the depth Z of the blob from its surface
92  //double coef = 0.9/14.85; // At 0.9m, the blob has a surface of 14.85 (Logitec sphere)
93  double coef = 1.2/13.0; // At 1m, the blob has a surface of 11.3 (AVT Pike 032C)
94  double L = 0.21; // 3D horizontal segment length
95  double Z_d = 0.8; // Desired distance along Z between camera and segment
96  bool save = false; // NOTE(review): never read afterwards in this listing — dead variable or used by a dropped line; confirm against the full source
97  bool normalized = true; // segment normalized features are used
98 
99  // Warning: To have a non singular task of rank 3, Y_d should be different from 0 so that
100  // the optical axis doesn't intersect the horizontal segment
101  double Y_d = -.11; // Desired distance along Y between camera and segment.
102  vpColVector qm(2); // Measured head position
103  qm = 0;
104  double qm_pan = 0; // Measured pan position (tilt is not handled in that example)
105 
106 #ifdef USE_REAL_ROBOT
107  // Initialize the biclops head
108 
109  vpRobotBiclops biclops("/usr/share/BiclopsDefault.cfg");
110  biclops.setDenavitHartenbergModel(vpBiclops::DH1);
111 
112  // Move to the initial position
113  vpColVector q(2);
114 
115  q=0;
116 // q[0] = vpMath::rad(63);
117 // q[1] = vpMath::rad(12); // introduce a tilt angle to compensate camera sphere tilt so that the camera is parallel to the plane
118 
119  biclops.setRobotState(vpRobot::STATE_POSITION_CONTROL) ;
120  biclops.setPosition( vpRobot::ARTICULAR_FRAME, q );
121  //biclops.setPositioningVelocity(50);
122  biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
123  qm_pan = qm[0]; // qm[0] is the pan axis; qm[1] (tilt) is ignored
124 
125  // Now the head will be controlled in velocity
126  biclops.setRobotState(vpRobot::STATE_VELOCITY_CONTROL) ;
127 
128  // Initialize the pioneer robot
129  vpRobotPioneer pioneer;
130  ArArgumentParser parser(&argc, argv);
131  parser.loadDefaultArguments();
132 
133  // ArRobotConnector connects to the robot, get some initial data from it such as type and name,
134  // and then loads parameter files for this robot.
135  ArRobotConnector robotConnector(&parser, &pioneer);
136  if(!robotConnector.connectRobot())
137  {
138  ArLog::log(ArLog::Terse, "Could not connect to the pioneer robot.");
139  if(parser.checkHelpAndWarnUnparsed())
140  {
141  Aria::logOptions();
142  Aria::exit(1);
143  }
144  // NOTE(review): if connectRobot() fails but help was not requested, execution
145  // falls through and keeps using the unconnected robot — confirm intended.
146  }
147  if (!Aria::parseArgs())
148  {
149  Aria::logOptions();
150  Aria::shutdown();
151  return false; // NOTE(review): returns false (0) from main on a failure path; an error exit code was probably intended
152  }
153 
154  pioneer.useSonar(false); // disable the sonar device usage
155 
156  // Wait 3 sec to be sure that the low level Aria thread used to control
157  // the robot is started. Without this delay we experienced a delay (around 2.2 sec)
158  // between the velocity send to the robot and the velocity that is really applied
159  // to the wheels.
160  sleep(3);
161 
162  std::cout << "Pioneer robot connected" << std::endl;
163 #endif
164 
165  vpPioneerPan robot_pan; // Generic robot that computes the velocities for the pioneer and the biclops head
166 
167  // Camera parameters. In this experiment we don't need a precise calibration of the camera
168  vpCameraParameters cam;
169 
170  // Create the camera framegrabber
171 #if defined(VISP_HAVE_V4L2)
172  // Create a grabber based on v4l2 third party lib (for usb cameras under Linux)
173  vpV4l2Grabber g;
174  g.setScale(1);
175  g.setInput(0);
176  g.setDevice("/dev/video1");
177  g.open(I);
178  // Logitec sphere parameters
179  cam.initPersProjWithoutDistortion(558, 555, 312, 210);
180 #elif defined(VISP_HAVE_DC1394_2)
181  // Create a grabber based on libdc1394-2.x third party lib (for firewire cameras under Linux)
182  vp1394TwoGrabber g(false);
183  // NOTE(review): listing lines 181-182 are missing from this capture (the
184  // video-mode/framerate setup and/or g.open(I) for this grabber) — restore from the ViSP repo.
185  // AVT Pike 032C parameters
186  cam.initPersProjWithoutDistortion(800, 795, 320, 216);
187 #elif defined(VISP_HAVE_CMU1394)
188  // Create a grabber based on CMU 1394 third party lib (for firewire cameras under windows)
189  // NOTE(review): listing line 187 is missing — the declaration `vp1394CMUGrabber g;`
190  // was dropped; as shown, `g` is undeclared in this branch. Restore from the ViSP repo.
191  g.setVideoMode(0, 5); // 640x480 MONO8
192  g.setFramerate(4); // 30 Hz
193  g.open(I);
194  // AVT Pike 032C parameters
195  cam.initPersProjWithoutDistortion(800, 795, 320, 216);
196 #endif
197 
198  // Acquire an image from the grabber
199  g.acquire(I);
200 
201  // Create an image viewer
202 #if defined(VISP_HAVE_X11)
203  vpDisplayX d(I, 10, 10, "Current frame");
204 #elif defined(VISP_HAVE_GDI)
205  vpDisplayGDI d(I, 10, 10, "Current frame");
206 #endif
207  // NOTE(review): listing line 204 is missing — presumably `vpDisplay::display(I);`
208  // before the flush below; confirm against the ViSP repo.
209  vpDisplay::flush(I);
210 
211  // The 3D segment consists in two horizontal dots
212  vpDot2 dot[2];
213  for (int i=0; i <2; i++)
214  {
215  dot[i].setGraphics(true);
216  dot[i].setComputeMoments(true); // m00 is needed below for the depth estimation
217  dot[i].setEllipsoidShapePrecision(0.); // to track a blob without any constraint on the shape
218  dot[i].setGrayLevelPrecision(0.9); // to set the blob gray level bounds for binarisation
219  dot[i].setEllipsoidBadPointsPercentage(0.5); // to accept 50% of bad inner and outside points with bad gray level
220  dot[i].initTracking(I); // waits for a user click on each blob
221  vpDisplay::flush(I);
222  }
223 
224  vpServo task;
225  // NOTE(review): listing lines 221-222 are missing — the usual
226  // task.setServo(vpServo::EYEINHAND_L_cVe_eJe) and setInteractionMatrixType(...)
227  // calls were dropped here; restore from the ViSP repo.
228  task.setLambda(lambda) ;
229  vpVelocityTwistMatrix cVe ; // keep to identity
230  cVe = robot_pan.get_cVe() ;
231  task.set_cVe(cVe) ;
232 
233  std::cout << "cVe: \n" << cVe << std::endl;
234 
235  vpMatrix eJe;
236 
237  // Update the robot jacobian that depends on the pan position
238  robot_pan.set_eJe(qm_pan);
239  // Get the robot jacobian
240  eJe = robot_pan.get_eJe() ;
241  task.set_eJe(eJe) ;
242  std::cout << "eJe: \n" << eJe << std::endl;
243 
244  // Define a 3D horizontal segment and its coordinates in the image plane
245  vpPoint P[2];
246  P[0].setWorldCoordinates(-L/2, 0, 0);
247  P[1].setWorldCoordinates( L/2, 0, 0);
248  // Define the desired camera position
249  vpHomogeneousMatrix cMo(0, Y_d, Z_d, 0, 0, 0); // Here we are in front of the segment
250  for (int i=0; i <2; i++)
251  {
252  P[i].changeFrame(cMo);
253  P[i].project(); // Here the x,y parameters obtained by perspective projection are computed
254  }
255 
256  // Estimate the depth of the segment extremity points
257  double surface[2];
258  double Z[2]; // Depth of the segment points
259  for (int i=0; i<2; i++)
260  {
261  // Surface of the blob estimated from the image moment m00 and converted in meters
262  surface[i] = 1./sqrt(dot[i].m00/(cam.get_px()*cam.get_py()));
263 
264  // Initial depth of the blob
265  Z[i] = coef * surface[i] ;
266  }
267 
268  // Use here a feature segment builder
269  vpFeatureSegment s_segment(normalized), s_segment_d(normalized); // From the segment feature we use only alpha
270  vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
271  s_segment.setZ1(Z[0]);
272  s_segment.setZ2(Z[1]);
273  // Set the desired feature
274  vpFeatureBuilder::create(s_segment_d, P[0], P[1]);
275  // NOTE(review): the two calls below look like they should target s_segment_d
276  // (the desired feature) — as written they overwrite the current-depth estimates
277  // set just above. Confirm against the current ViSP example before changing.
278  s_segment.setZ1( P[0].get_Z() ); // Desired depth
279  s_segment.setZ2( P[1].get_Z() );
280 
281  task.addFeature(s_segment, s_segment_d,
282  // NOTE(review): listing lines 274-276 are missing — the feature-selection
283  // arguments and closing parenthesis of this addFeature() call were dropped
284  // (typically selectXc() | selectL() | selectAlpha()); restore from the ViSP repo.
285 
286 #ifdef USE_PLOTTER
287  //Create a window (500 by 500) at position (700, 10) with two graphics
288  vpPlot graph(2, 500, 500, 700, 10, "Curves...");
289 
290  //The first graphic contains 3 curves and the second graphic contains 3 curves
291  graph.initGraph(0,3);
292  graph.initGraph(1,3);
293  graph.setTitle(0, "Velocities");
294  graph.setTitle(1, "Error s-s*");
295  graph.setLegend(0, 0, "vx");
296  graph.setLegend(0, 1, "wz");
297  graph.setLegend(0, 2, "w_pan");
298  graph.setLegend(1, 0, "xm/l");
299  graph.setLegend(1, 1, "1/l");
300  graph.setLegend(1, 2, "alpha");
301 #endif
302 
303  vpColVector v; // vz, wx
304  unsigned int iter = 0;
305  try
306  {
307  while(1)
308  {
309 #ifdef USE_REAL_ROBOT
310  // Get the new pan position
311  biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
312 #endif
313  qm_pan = qm[0];
314 
315  // Acquire a new image
316  g.acquire(I);
317  // Set the image as background of the viewer
318  // NOTE(review): listing line 310 is missing — presumably `vpDisplay::display(I);`
319 
320  // Display the desired position of the segment
321  for (int i=0; i<2; i++)
322  P[i].display(I, cam, vpColor::red, 3);
323 
324  // Does the blob tracking
325  for (int i=0; i<2; i++)
326  dot[i].track(I);
327 
328  for (int i=0; i<2; i++)
329  {
330  // Surface of the blob estimated from the image moment m00 and converted in meters
331  surface[i] = 1./sqrt(dot[i].m00/(cam.get_px()*cam.get_py()));
332 
333  // Initial depth of the blob
334  Z[i] = coef * surface[i] ;
335  }
336 
337  // Update the features
338  vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
339  // Update the depth of the point. Useful only if current interaction matrix is used
340  // when task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE) is set
341  s_segment.setZ1(Z[0]);
342  s_segment.setZ2(Z[1]);
343 
344  robot_pan.get_cVe(cVe);
345  task.set_cVe(cVe);
346 
347  // Update the robot jacobian that depends on the pan position
348  robot_pan.set_eJe(qm_pan);
349  // Get the robot jacobian
350  eJe = robot_pan.get_eJe();
351  // Update the jacobian that will be used to compute the control law
352  task.set_eJe(eJe);
353 
354  // Compute the control law. Velocities are computed in the mobile robot reference frame
355  v = task.computeControlLaw();
356 
357 // std::cout << "-----" << std::endl;
358 // std::cout << "v: " << v.t() << std::endl;
359 // std::cout << "error: " << task.getError().t() << std::endl;
360 // std::cout << "L:\n " << task.getInteractionMatrix() << std::endl;
361 // std::cout << "eJe:\n " << task.get_eJe() << std::endl;
362 // std::cout << "cVe:\n " << task.get_cVe() << std::endl;
363 // std::cout << "L_cVe_eJe:\n" << task.getInteractionMatrix() * task.get_cVe() * task.get_eJe() << std::endl;
364 // task.print() ;
365  if (task.getTaskRank() != 3)
366  std::cout << "Warning: task is of rank " << task.getTaskRank() << std::endl;
367 
368 #ifdef USE_PLOTTER
369  graph.plot(0, iter, v); // plot velocities applied to the robot
370  graph.plot(1, iter, task.getError()); // plot error vector
371 #endif
372 
373 #ifdef USE_REAL_ROBOT
374  // Send the velocity to the robot
375  vpColVector v_pioneer(2); // vx, wz
376  v_pioneer[0] = v[0];
377  v_pioneer[1] = v[1];
378  vpColVector v_biclops(2); // qdot pan and tilt
379  v_biclops[0] = v[2];
380  v_biclops[1] = 0; // tilt axis is not servoed in this example
381 
382  // NOTE(review): "pionner" typo lives in the runtime string below; left as-is here
383  // since a doc pass must not touch program output.
384  std::cout << "Send velocity to the pionner: " << v_pioneer[0] << " m/s "
385  << vpMath::deg(v_pioneer[1]) << " deg/s" << std::endl;
386  std::cout << "Send velocity to the biclops head: " << vpMath::deg(v_biclops[0]) << " deg/s" << std::endl;
387 
388  pioneer.setVelocity(vpRobot::REFERENCE_FRAME, v_pioneer);
389  biclops.setVelocity(vpRobot::ARTICULAR_FRAME, v_biclops) ;
390 #endif
391 
392  // Draw a vertical line which corresponds to the desired x coordinate of the dot cog
393  // NOTE(review): 479 hard-codes a 480-pixel-high image — matches the 640x480 modes above
394  vpDisplay::displayLine(I, 0, cam.get_u0(), 479, cam.get_u0(), vpColor::red);
395  vpDisplay::flush(I);
396 
397  // A click in the viewer to exit
398  if ( vpDisplay::getClick(I, false) )
399  break;
400 
401  iter ++;
402  //break;
403  }
404  }
405  catch(...)
406  {
407  // NOTE(review): all exceptions (tracking loss, grabber errors) are silently
408  // swallowed so the shutdown code below always runs; nothing is logged.
409  }
410 
411 #ifdef USE_REAL_ROBOT
412  std::cout << "Ending robot thread..." << std::endl;
413  pioneer.stopRunning();
414 
415  // wait for the thread to stop
416  pioneer.waitForRunExit();
417 #endif
418 
419  // Kill the servo task
420  task.print() ;
421  task.kill();
422 
423 #endif
424 #endif
425 }
413 #else
// Fallback entry point: built when ViSP lacks Pioneer and/or Biclops support.
int main()
{
  const char *msg = "ViSP is not able to control the Pioneer robot";
  std::cout << msg << std::endl;
  return 0;
}
419 #endif