DepthToCloud.H
/*! @file
    @author Shane Grant
    @copyright GNU Public License (GPL v3)
    @section License
    @verbatim
    // ////////////////////////////////////////////////////////////////////////
    // The iLab Neuromorphic Robotics Toolkit (NRT) //
    // Copyright 2010-2012 by the University of Southern California (USC) //
    // and the iLab at USC. //
    // //
    // iLab - University of Southern California //
    // Hedco Neuroscience Building, Room HNB-10 //
    // Los Angeles, CA 90089-2520 - USA //
    // //
    // See http://ilab.usc.edu for information about this project. //
    // ////////////////////////////////////////////////////////////////////////
    // This file is part of The iLab Neuromorphic Robotics Toolkit. //
    // //
    // The iLab Neuromorphic Robotics Toolkit is free software: you can //
    // redistribute it and/or modify it under the terms of the GNU General //
    // Public License as published by the Free Software Foundation, either //
    // version 3 of the License, or (at your option) any later version. //
    // //
    // The iLab Neuromorphic Robotics Toolkit is distributed in the hope //
    // that it will be useful, but WITHOUT ANY WARRANTY; without even the //
    // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR //
    // PURPOSE. See the GNU General Public License for more details. //
    // //
    // You should have received a copy of the GNU General Public License //
    // along with The iLab Neuromorphic Robotics Toolkit. If not, see //
    // <http://www.gnu.org/licenses/>. //
    // ////////////////////////////////////////////////////////////////////////
    @endverbatim */

#ifdef NRT_HAVE_CLOUD
#ifndef INCLUDE_NRT_POINTCLOUD2_IO_DEPTHTOCLOUD_H
#define INCLUDE_NRT_POINTCLOUD2_IO_DEPTHTOCLOUD_H

#include <nrt/Core/Image/Image.H>

namespace nrt
{
  //! Converts a depth image to a point cloud
  /*! This will create a geometry-only point cloud based upon the depth image
      and its device parameters.

      The coordinate system of the returned cloud is EAST-NORTH-UP: moving
      forward is positive Y, moving upward is positive Z, and moving
      horizontally to the right is positive X.

      Invalid range readings should be marked with a quiet_NaN. By default,
      NaNs are kept in the final point cloud to preserve the structure of the
      depth image. See the usage sketch following this declaration.

      @tparam T The base type for the pixels
      @param depthImage The depth image
      @param focalLength The focal length of the depth device, in pixels
      @param keepNaN Whether invalid (NaN) pixels in the depth image should be kept in the point cloud
      @return A point cloud representation of the depth image */
  template <class T>
  PointCloud2 depthToPointCloud( Image<PixGray<T>> const depthImage, float focalLength, const bool keepNaN = true );
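
  // Example: a hypothetical usage sketch, not part of the original header. The
  // depth source, the float pixel type, and the 525.0f focal length are
  // illustrative assumptions; only depthToPointCloud() itself is declared here.
  //
  //   nrt::Image<nrt::PixGray<float>> depth = grabDepthFrame(); // depths in meters, invalid = quiet_NaN
  //   float const focalPix = 525.0f;                            // depth sensor focal length, in pixels
  //   nrt::PointCloud2 cloud = nrt::depthToPointCloud(depth, focalPix, /* keepNaN = */ false);
  //
  // Under the EAST-NORTH-UP convention documented above, a standard pinhole
  // back-projection of pixel (u,v) with depth d would presumably give
  //   X = (u - cx) * d / focalPix   (right)
  //   Y = d                         (forward)
  //   Z = -(v - cy) * d / focalPix  (up)
  // with (cx,cy) the image center; the exact sign and center conventions used
  // internally are assumptions, not specified by this header.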

  //! Converts a depth image with color data to a point cloud
  /*! This will create a point cloud with geometry and PixRGB<T1> color data
      (as a dense field), based upon the depth image and its device parameters.
      The depth image should be properly registered.

      The coordinate system of the returned cloud is EAST-NORTH-UP: moving
      forward is positive Y, moving upward is positive Z, and moving
      horizontally to the right is positive X.

      Invalid range readings should be marked with a quiet_NaN. By default,
      NaNs are kept in the final point cloud to preserve the structure of the
      depth image. See the usage sketch following this declaration.

      @tparam T1 The base type for the RGB pixels
      @tparam T2 The base type for the depth pixels
      @param depthImage The depth (and color) image
      @param focalLength The focal length (in pixels) of whichever sensor the data is registered to
      @param keepNaN Whether invalid (NaN) pixels in the depth image should be kept in the point cloud
      @return A point cloud representation of the depth image */
  template <class T1, class T2>
  PointCloud2 depthToPointCloud( Image<PixRGBD<T1, T2>> const depthImage, float focalLength, const bool keepNaN = true );
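
  // Example: a hypothetical usage sketch, not part of the original header. The
  // frame source, the unsigned char / float pixel types, and the 525.0f focal
  // length are illustrative assumptions.
  //
  //   nrt::Image<nrt::PixRGBD<unsigned char, float>> frame = grabRegisteredRGBDFrame();
  //   nrt::PointCloud2 colorCloud = nrt::depthToPointCloud(frame, 525.0f); // keepNaN defaults to true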
82 
83  //! Converts a depth image with color data to a point cloud, version for split RGB and Depth data
84  /*! This will create a geometry and PixRGB<T1> (as a dense field) point cloud based
85  upon the depth image and its device parameters. The depth image should be properly registered.
86 
87  The coordinate system of the returned cloud will be EAST-NORTH-UP, that is
88  moving forward will be positve Y, moving upwards will be positive Z, and
89  moving horizontally to the right will be positive X.
90 
91  Invalid range readings should be marked by a quiet_NaN. The default behavior
92  is to keep nans in the final point cloud to preserve the structure of the depth image.
93 
94  @tparam T1 The base type for the rgb pixels
95  @tparam T2 The base type for the depth pixels
96  @param rgbImage The color image. Dims must match between color and depth images.
97  @param depthImage The depth image.
98  @param focalLength The focal length (in pixels) of whichever sensor the data is registered to.
99  @param keepNaN Whether invalid (nan) pixels in the depth image should be kept in the point cloud
100  @return A point cloud representation of the depth image */
101  template <class T1, class T2>
102  PointCloud2 depthToPointCloud( Image<PixRGB<T1>> const rgbImage, Image<PixGray<T2>> const depthImage,
103  float focalLength, const bool keepNaN = true );
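
  // Example: a hypothetical usage sketch, not part of the original header. The
  // image sources and the 525.0f focal length are illustrative assumptions; the
  // color image must have the same dimensions as the depth image.
  //
  //   nrt::Image<nrt::PixRGB<unsigned char>> rgb = grabColorFrame();
  //   nrt::Image<nrt::PixGray<float>> depth = grabDepthRegisteredToColor(); // NaN marks invalid readings
  //   nrt::PointCloud2 colorCloud = nrt::depthToPointCloud(rgb, depth, 525.0f, /* keepNaN = */ true);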
} // namespace nrt

#endif // INCLUDE_NRT_POINTCLOUD2_IO_DEPTHTOCLOUD_H
#endif // NRT_HAVE_CLOUD