iLab Neuromorphic Robotics Toolkit  0.1
tests/test-PointCloud2OpenNI.C
/*! @file
@author Shane Grant
@copyright GNU Public License (GPL v3)
@section License
@verbatim
// ////////////////////////////////////////////////////////////////////////
// The iLab Neuromorphic Robotics Toolkit (NRT) //
// Copyright 2010-2012 by the University of Southern California (USC) //
// and the iLab at USC. //
// //
// iLab - University of Southern California //
// Hedco Neurociences Building, Room HNB-10 //
// Los Angeles, Ca 90089-2520 - USA //
// //
// See http://ilab.usc.edu for information about this project. //
// ////////////////////////////////////////////////////////////////////////
// This file is part of The iLab Neuromorphic Robotics Toolkit. //
// //
// The iLab Neuromorphic Robotics Toolkit is free software: you can //
// redistribute it and/or modify it under the terms of the GNU General //
// Public License as published by the Free Software Foundation, either //
// version 3 of the License, or (at your option) any later version. //
// //
// The iLab Neuromorphic Robotics Toolkit is distributed in the hope //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR //
// PURPOSE. See the GNU General Public License for more details. //
// //
// You should have received a copy of the GNU General Public License //
// along with The iLab Neuromorphic Robotics Toolkit. If not, see //
// <http://www.gnu.org/licenses/>. //
// ////////////////////////////////////////////////////////////////////////
@endverbatim */
#include <nrt/config.h>
#if defined(NRT_HAVE_CLOUD) && defined(NRT_HAVE_OPENNI)

#include <cstdlib>  // for exit()
#include <iostream>
#include <mutex>
#include <thread>
#include <unistd.h> // for usleep()
// NRT point-cloud, registration, and filter includes go here (their paths are not shown in this listing)
// for playing around with registration
// for playing around with filters
using namespace nrt;

int main(int argc, char const* argv[])
{
  try
  {
    Manager mgr(argc, argv);

    auto window = mgr.addComponent<nrt::graphics::ShapeRendererBasic>("Renderer");
    auto mySource = mgr.addComponent<OpenNIImageSource>("OpenNI");

    // Get started:
    mgr.launch();

    // Create a drawing context:
    window->createContext();

    // Set our camera:
    window->lookAt(Eigen::Vector3d(0, -3, 3), Eigen::Vector3d(0, 0, 0), Eigen::Vector3d::UnitZ());
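    // (The camera sits 3 units behind and 3 above the origin, looking at the origin with +Z up.)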

    bool first = false;
    PointCloud2 firstCloud;
    PointCloud2 firstCloudDense;
    PointCloud2::AffineTransform lastAlignment = PointCloud2::AffineTransform::Identity();
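    // lastAlignment keeps the most recent registration result; it is passed to each align() call below
    // as the initial guess, so every frame starts from the previous frame's solution.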

    // Create the parameters for our registration methods
    const size_t maxIters = 20;
    ConvergenceCriteriaBase::SharedPtr convergeCriteria( new StandardConvergenceCriteria( maxIters, true ) );
    CorrespondenceEstimationBase::SharedPtr correspondenceEstimator( new CorrespondenceEstimationNearestNeighbor() );
    CorrespondenceRejectionBase::SharedPtr correspondencePrunerDistance( new CorrespondenceRejectionDistance( 0.05 ) );
    CorrespondenceRejectionBase::SharedPtr correspondencePrunerRANSAC( new CorrespondenceRejectionRANSAC( 0.05 ) );
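    // The 0.05 arguments above are rejection thresholds (presumably meters): correspondences farther
    // apart than that, or inconsistent with the RANSAC model by more than that, are discarded.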

    // Transformation estimators for the registration objects below; their definitions are missing from
    // the original listing, so the class names used here are assumed.
    TransformationEstimationBase::SharedPtr estimationSVD( new TransformationEstimationSVD() );
    TransformationEstimationBase::SharedPtr estimationLM( new TransformationEstimationLM() );

    // Construct the registration objects
    Registration myRegistrationSVD( correspondenceEstimator, {correspondencePrunerDistance, correspondencePrunerRANSAC},
                                    estimationSVD, convergeCriteria );
    Registration myRegistrationLM( correspondenceEstimator, {correspondencePrunerDistance, correspondencePrunerRANSAC},
                                   estimationLM, convergeCriteria );
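    // Each Registration object runs an ICP-style loop: estimate nearest-neighbor correspondences, prune
    // them with the two rejectors, estimate a rigid transform (SVD-based or Levenberg-Marquardt), and
    // repeat until the convergence criteria (here capped at 20 iterations) are satisfied.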

    Timer t;
    bool downSample = true;
    bool f2f = false;
    //downSample = false;

    PointCloud2 cloud;
    PointCloud2 cloudRotated;
    PointCloud2 downSampledCloud;
    PointCloud2 downSampledCloudRotated;
    PointCloud2 previousCloud; // used in frame-to-frame registration

    std::mutex theMutex;
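
    // Background thread that reads single-character commands from stdin; it shares the clouds above
    // with the main loop, hence theMutex.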
    std::thread inputthread([&]() {
      while (true)
      {
        std::cout << "a to append, r to reset, f for frame-to-frame, q to quit: ";
        char buf;
        std::cin >> buf;

        if (buf == 'q') exit(0);
        else if (buf == 'a')
        {
          std::lock_guard<std::mutex> lock( theMutex );
          firstCloud.append( downSampledCloudRotated );
          firstCloudDense.append( cloudRotated );
        }
        else if (buf == 'r')
        {
          std::lock_guard<std::mutex> lock( theMutex );
          firstCloud = downSampledCloud;
          firstCloudDense = cloud;
        }
        else if (buf == 'f')
        {
          std::lock_guard<std::mutex> lock( theMutex );
          f2f = true;
          firstCloud = downSampledCloud;
          firstCloudDense = cloud;
        }
      }
    });
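
    // Main loop: grab a frame, build and (optionally) downsample a point cloud, register it against
    // the reference cloud, and render both.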
    while (true)
    {
      if ( !mySource->ok() )
      {
        usleep( 1.0 / 100.0 * 1000000 ); // sleep 10ms until the source is ready
        continue;
      }

      window->initFrame();

      // Get the next image:
      nrt::GenericImage image, depth, ir;
      mySource->in_rgbd(image, depth, ir);
      //NRT_WARNING("Got data: RGB: " << image.dims() << " and Depth: " << depth.dims());

      // Get the device data
      auto data = mySource->getDeviceData();

      // Make the cloud
      if (image.dims().empty() == false && depth.dims().empty() == false) // we have RGB data
      {
        // The colored depthToPointCloud call was truncated in the original listing; the overload taking
        // the RGB image as well is assumed here, mirroring the depth-only branch below.
        cloud = nrt::depthToPointCloud(depth.get<nrt::PixGray<float>>(), image.get<nrt::PixRGB<nrt::byte>>(),
                                       data.Depth.focalLength, false);
        if( downSample )
          downSampledCloud = filterPointCloud( cloud, VoxelFilter( 0.05 ), VoxelFilter::GeometryColorAverage() );
        else
          downSampledCloud = cloud;
      }
      else if (depth.dims().empty() == false) // 50 shades of gray (depth only, no color)
      {
        cloud = nrt::depthToPointCloud(depth.get<nrt::PixGray<float>>(), data.Depth.focalLength, false);
        if( downSample )
          downSampledCloud = filterPointCloud( cloud, VoxelFilter( 0.05 ) );
        else
          downSampledCloud = cloud;
      }
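
      // (VoxelFilter( 0.05 ) buckets the cloud onto a voxel grid with 0.05-unit cells, presumably
      // meters; GeometryColorAverage additionally averages the colors that land in each voxel.)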
      // Do something with your cloud
      //NRT_INFO( "My cloud has " << cloud.size() << " points and they are " << ((cloud.hasField<PixRGB<byte>>()) ? "in" : "not in") << " color!" );

      // Try registration
      if( !first )
      {
        // First frame: keep it as the reference cloud
        firstCloudDense = cloud;
        firstCloud = downSampledCloud;
        first = true;
      }
      else
      {
        std::lock_guard<std::mutex> lock( theMutex );
        t.reset();

        // Align the current downsampled cloud against the reference cloud (or the previous frame in
        // frame-to-frame mode), seeded with the last result:
        PointCloud2::AffineTransform alignment =
            myRegistrationLM.align( downSampledCloud, f2f ? previousCloud : firstCloud, lastAlignment );
        //NRT_INFO( "LM Results in " << t.getreset().count() << " seconds:\n" << alignment.matrix() );
        //NRT_INFO( "Converged = " << myRegistrationLM.converged() );

        //alignment = myRegistrationSVD.align( downSampledCloud, firstCloud, lastAlignment );
        //NRT_INFO( "SVD Results in " << t.getreset().count() << " seconds:\n" << alignment.matrix() );
        //NRT_INFO( "Converged = " << myRegistrationSVD.converged() );

        lastAlignment = alignment;
        previousCloud = downSampledCloud;

        // Transform for visualization and appending
        transformPointCloudInPlace( cloud, alignment );
        transformPointCloudInPlace( downSampledCloud, alignment );
        cloudRotated = cloud;
        downSampledCloudRotated = downSampledCloud;

        // Render the first (reference) cloud
        graphics::PointCloud drawCloud( firstCloudDense, 0.005f );
        drawCloud.render( *window );
      }

      // Render the current cloud
      graphics::PointCloud drawCloud( cloud, 3.0f );
      drawCloud.render( *window );

      window->renderFrame();
    }
  }
  // The catch clause was dropped from the original listing; catching std::exception is assumed here.
  catch (std::exception const & e)
  { NRT_WARNING("Received Parameter exception:\n" << e.what()); }

  return 0;
}
#else // no point cloud or OpenNI support

int main(int argc, char const* argv[])
{
  NRT_INFO("No OpenNI support");
  return 0;
}

#endif // NRT_HAVE_CLOUD && NRT_HAVE_OPENNI