iLab Neuromorphic Robotics Toolkit 0.1
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
test-PointCloud2OpenNI.C
Go to the documentation of this file.
1 /*! @file
2  @author Shane Grant
3  @copyright GNU Public License (GPL v3)
4  @section License
5  @verbatim
6  // ////////////////////////////////////////////////////////////////////////
7  // The iLab Neuromorphic Robotics Toolkit (NRT) //
8  // Copyright 2010-2012 by the University of Southern California (USC) //
9  // and the iLab at USC. //
10  // //
11  // iLab - University of Southern California //
12  // Hedco Neurociences Building, Room HNB-10 //
13  // Los Angeles, Ca 90089-2520 - USA //
14  // //
15  // See http://ilab.usc.edu for information about this project. //
16  // ////////////////////////////////////////////////////////////////////////
17  // This file is part of The iLab Neuromorphic Robotics Toolkit. //
18  // //
19  // The iLab Neuromorphic Robotics Toolkit is free software: you can //
20  // redistribute it and/or modify it under the terms of the GNU General //
21  // Public License as published by the Free Software Foundation, either //
22  // version 3 of the License, or (at your option) any later version. //
23  // //
24  // The iLab Neuromorphic Robotics Toolkit is distributed in the hope //
25  // that it will be useful, but WITHOUT ANY WARRANTY; without even the //
26  // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR //
27  // PURPOSE. See the GNU General Public License for more details. //
28  // //
29  // You should have received a copy of the GNU General Public License //
30  // along with The iLab Neuromorphic Robotics Toolkit. If not, see //
31  // <http://www.gnu.org/licenses/>. //
32  // ////////////////////////////////////////////////////////////////////////
33  @endverbatim */
34 
35 #include <nrt/config.h>
36 #if defined(NRT_HAVE_CLOUD) && defined(NRT_HAVE_OPENNI)
37 
38 #include <nrt/Core/Image/Image.H>
39 #include <nrt/Core/Model/Manager.H>
41 #include <nrt/Core/Typing/Time.H>
42 
43 // for playing around with registration
52 
53 // for playing around with filters
55 
57 #include <nrt/Graphics/Shapes.H>
58 
61 
62 using namespace nrt;
63 
64 int main(int argc, char const* argv[])
65 {
 // Interactive test of PointCloud2 registration against a live OpenNI
 // (Kinect-style) RGB-D source: grab frames, build point clouds, voxel-
 // downsample, ICP-align each frame to a reference cloud, and render the
 // accumulated result.  Console keys (read on a helper thread): 'a' append,
 // 'r' reset reference, 'f' frame-to-frame mode, 'q' quit.
 //
 // NOTE(review): this listing is a documentation extraction with gaps —
 // original lines 40, 44-51, 54, 56, 59-60, 98-99, 174, 194 and 236 are
 // absent.  The declarations of estimationSVD, estimationLM and alignment,
 // part of the RGB cloud-construction call, and the catch clause near the
 // end all live on those missing lines.
 66  try
 67  {
 // Command-line/model manager owns all components below.
 68  Manager mgr(argc, argv);
 69 
 70  auto window = mgr.addComponent<nrt::graphics::ShapeRendererBasic>("Renderer");
 71  auto mySource = mgr.addComponent<OpenNIImageSource>("OpenNI");
 72 
 73  // get started:
 74  mgr.launch();
 75 
 76  // Create a drawing context:
 77  window->createContext();
 78 
 79  // Set our camera:
 // Eye at (0,-3,3), looking at the origin, with +Z as the up vector.
 80  window->lookAt(Eigen::Vector3d(0, -3, 3), Eigen::Vector3d(0, 0, 0), Eigen::Vector3d::UnitZ());
 81 
 // Despite the name, 'first' means "reference clouds initialized": it starts
 // false and is latched true once the first frame has seeded the references.
 82  bool first = false;
 83 
 // Reference (target) clouds: downsampled copy used for alignment, dense
 // copy used for rendering/appending.  lastAlignment carries the previous
 // frame's result as the initial guess for the next align() call.
 84  PointCloud2 firstCloud;
 85  PointCloud2 firstCloudDense;
 86  PointCloud2::AffineTransform lastAlignment = PointCloud2::AffineTransform::Identity();
 87 
 88  // Create the parameters for our registration methods
 // Stop after at most 20 iterations (StandardConvergenceCriteria's other
 // termination conditions also apply — see its documentation).
 89  const size_t maxIters = 20;
 90  ConvergenceCriteriaBase::SharedPtr convergeCritera( new StandardConvergenceCriteria( maxIters, true ) );
 91 
 // Nearest-neighbor correspondence matching, pruned by two rejectors:
 // a max-distance cutoff (0.05) and a RANSAC outlier filter (0.05).
 92  CorrespondenceEstimationBase::SharedPtr correspondenceEstimator( new CorrespondenceEstimationNearestNeighbor() );
 93 
 94  CorrespondenceRejectionBase::SharedPtr correspondencePrunerDistance( new CorrespondenceRejectionDistance( 0.05 ) );
 95 
 96  CorrespondenceRejectionBase::SharedPtr correspondencePrunerRANSAC( new CorrespondenceRejectionRANSAC( 0.05 ) );
 97 
 100 
 101  // Construct the registration objects
 // Two variants sharing the estimator/pruners/criteria; they differ only in
 // the transform-estimation step (SVD vs Levenberg-Marquardt).  The
 // estimationSVD/estimationLM objects are declared on lines missing from
 // this listing.  Only the LM variant is exercised below.
 102  Registration myRegistrationSVD( correspondenceEstimator, {correspondencePrunerDistance, correspondencePrunerRANSAC},
 103  estimationSVD,
 104  convergeCritera );
 105 
 106  Registration myRegistrationLM( correspondenceEstimator, {correspondencePrunerDistance, correspondencePrunerRANSAC},
 107  estimationLM,
 108  convergeCritera );
 109 
 // Timer for the (currently commented-out) per-align timing printouts.
 110  Timer t;
 111 
 // downSample: voxel-filter each cloud before alignment.  f2f: align each
 // frame against the previous frame instead of the fixed reference cloud.
 112  bool downSample = true;
 113  bool f2f = false;
 114  //downSample = false;
 115 
 116  PointCloud2 cloud;
 117  PointCloud2 cloudRotated;
 118  PointCloud2 downSampledCloud;
 119  PointCloud2 downSampledCloudRotated;
 120  PointCloud2 previousCloud; // used in frame 2 frame reg
 // Guards the clouds shared with the console-input thread below.
 // NOTE(review): the capture section of the main loop writes cloud and
 // downSampledCloud WITHOUT holding this mutex while the input thread reads
 // them under it — looks like a data race; confirm intended.
 121  std::mutex theMutex;
 122 
 // Console command thread: 'q' hard-exits the whole process via exit(0);
 // 'a' appends the current aligned clouds into the accumulated reference;
 // 'r' resets the reference to the current frame; 'f' additionally switches
 // to frame-to-frame registration.  Runs forever; never joined (the 'q'
 // path or process teardown ends it).
 123  std::thread inputthread([&](){
 124  while(true)
 125  {
 126  std::cout << "a to append q to quit r to reset: ";
 127  char buf;
 128  std::cin >> buf;
 129  if(buf == 'q') exit(0);
 130  else if( buf == 'a' )
 131  {
 132  std::lock_guard<std::mutex> lock( theMutex );
 133  firstCloud.append( downSampledCloudRotated );
 134  firstCloudDense.append( cloudRotated );
 135  }
 136  else if( buf == 'r' )
 137  {
 138  std::lock_guard<std::mutex> lock( theMutex );
 139  firstCloud = downSampledCloud;
 140  firstCloudDense = cloud;
 141  }
 142  else if( buf == 'f' )
 143  {
 144  std::lock_guard<std::mutex> lock( theMutex );
 145  f2f = true;
 146  firstCloud = downSampledCloud;
 147  firstCloudDense = cloud;
 148  }
 149  }});
 150 
 151 
 // Main capture/align/render loop; runs until 'q' calls exit(0).
 152  while (true)
 153  {
 // Source not ready yet: sleep 10 ms (1/100 s in microseconds) and retry.
 154  if ( !mySource->ok() )
 155  {
 156  usleep( 1.0 / 100.0 * 1000000 );
 157  continue;
 158  }
 159 
 160  window->initFrame();
 161 
 162  // Get the next image:
 163  nrt::GenericImage image, depth, ir;
 164  mySource->in_rgbd(image, depth, ir);
 165 
 166  //NRT_WARNING("Got data: RGB: " << image.dims() << " and Depth: " << depth.dims());
 167 
 168  // Get the device data
 // Provides camera intrinsics (data.Depth.focalLength) for reprojection.
 169  auto data = mySource->getDeviceData();
 170 
 171  // make the cloud
 // Color path: build an RGB point cloud (the start of this call is on a
 // line missing from this listing), then voxel-downsample at a 0.05 leaf
 // size, averaging both geometry and color per voxel.
 172  if (image.dims().empty() == false && depth.dims().empty() == false) // we have RGB data
 173  {
 175  data.Depth.focalLength, false);
 176  if( downSample )
 177  downSampledCloud = filterPointCloud( cloud, VoxelFilter( 0.05 ), VoxelFilter::GeometryColorAverage() );
 178  else
 179  downSampledCloud = cloud;
 180  }
 // Depth-only path: grayscale cloud, geometry-only voxel downsample.
 181  else if (depth.dims().empty() == false) // 50 shades of gray
 182  {
 183  cloud = nrt::depthToPointCloud(depth.get<nrt::PixGray<float>>(), data.Depth.focalLength, false);
 184  if( downSample )
 185  downSampledCloud = filterPointCloud( cloud, VoxelFilter( 0.05 ) );
 186  else
 187  downSampledCloud = cloud;
 188  }
 189 
 190  // Do something with your cloud
 191  //NRT_INFO( "My cloud has " << cloud.size() << " points and they are " << ((cloud.hasField<PixRGB<byte>>()) ? "in" : "not in") << " color!" );
 192 
 193  // Try registration
 // Very first frame just seeds the reference clouds; every later frame is
 // aligned against the reference (or previous frame in f2f mode).
 195  if( !first )
 196  {
 197  firstCloudDense = cloud;
 198  firstCloud = downSampledCloud;
 199  first = true;
 200  }
 201  else
 202  {
 203  std::lock_guard<std::mutex> lock( theMutex );
 204  t.reset();
 205 
 // LM alignment, using last frame's transform as the initial guess.
 // ('alignment' is declared on a line missing from this listing.)
 206  alignment = myRegistrationLM.align( downSampledCloud, f2f ? previousCloud : firstCloud, lastAlignment );
 207  //NRT_INFO( "LM Results in " << t.getreset().count() << " seconds:\n" << alignment.matrix() );
 208  //NRT_INFO( "Converged = " << myRegistrationLM.converged() );
 209 
 210  //alignment = myRegistrationSVD.align( downSampledCloud, firstCloud, lastAlignment );
 211  //NRT_INFO( "SVD Results in " << t.getreset().count() << " seconds:\n" << alignment.matrix() );
 212  //NRT_INFO( "Converged = " << myRegistrationSVD.converged() );
 213 
 214  lastAlignment = alignment;
 215  previousCloud = downSampledCloud;
 216 
 217  // transform for visualization and appending
 218  transformPointCloudInPlace( cloud, alignment );
 219  transformPointCloudInPlace( downSampledCloud, alignment );
 220 
 // Aligned snapshots consumed by the input thread's 'a' (append) command.
 221  cloudRotated = cloud;
 222  downSampledCloudRotated = downSampledCloud;
 223 
 224  // Render the first cloud
 // Accumulated dense reference, drawn as 0.005-unit cubes.
 225  graphics::PointCloud drawCloud( firstCloudDense, 0.005f );
 226  drawCloud.setDisplayType(graphics::PointCloud::Cube);
 227  drawCloud.render( *window );
 228  }
 229 
 230  // Render the current cloud
 // Live frame, drawn with point size 3.0.
 231  graphics::PointCloud drawCloud( cloud, 3.0f );
 232  drawCloud.render( *window );
 233  window->renderFrame();
 234  }
 235  }
 // The catch clause (original line 236) is missing from this listing; this
 // is its handler body, logging the caught exception's message.
 237  { NRT_WARNING(" Received Parameter exception:\n" << e.what()); }
 238 
 239  return 0;
240 }
241 #else // no openni
242 #include <nrt/Core/Debugging/Log.H>
243 
244 int main(int argc, char const * argv[] )
245 {
246  NRT_INFO("No OpenNI support");
247  return 0;
248 }
249 #endif // NRT_HAVE_OPENNI