Point Cloud Library (PCL) 1.3.1
nn_classification.h
00001 /* 00002 * Software License Agreement (BSD License) 00003 * 00004 * Copyright (c) 2010, Willow Garage, Inc. 00005 * All rights reserved. 00006 * 00007 * Redistribution and use in source and binary forms, with or without 00008 * modification, are permitted provided that the following conditions 00009 * are met: 00010 * 00011 * * Redistributions of source code must retain the above copyright 00012 * notice, this list of conditions and the following disclaimer. 00013 * * Redistributions in binary form must reproduce the above 00014 * copyright notice, this list of conditions and the following 00015 * disclaimer in the documentation and/or other materials provided 00016 * with the distribution. 00017 * * Neither the name of Willow Garage, Inc. nor the names of its 00018 * contributors may be used to endorse or promote products derived 00019 * from this software without specific prior written permission. 00020 * 00021 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 00022 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 00023 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 00024 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 00025 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 00026 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 00027 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 00028 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 00029 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 00030 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 00031 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 00032 * POSSIBILITY OF SUCH DAMAGE. 00033 * 00034 * $Id: nn_classification.h 1378 2011-06-19 11:24:09Z svn $ 00035 * 00036 */ 00037 00038 #ifndef NNCLASSIFICATION_H_ 00039 #define NNCLASSIFICATION_H_ 00040 00041 #include <cstdlib> 00042 #include <cfloat> 00043 #include <algorithm> 00044 #include <boost/foreach.hpp> 00045 #include <boost/shared_ptr.hpp> 00046 #include <pcl/kdtree/kdtree_flann.h> 00047 00048 namespace pcl 00049 { 00056 template <typename PointT> 00057 class NNClassification 00058 { 00059 private: 00060 00061 typename pcl::KdTree<PointT>::Ptr tree_; 00062 00064 std::vector<std::string> classes_; 00066 std::vector<int> labels_idx_; 00067 00068 public: 00069 00071 typedef std::pair<std::vector<std::string>, std::vector<float> > Result; 00072 typedef boost::shared_ptr<Result> ResultPtr; 00073 00074 // TODO setIndices method, distance metrics and reset tree 00075 00079 void setTrainingFeatures (const typename pcl::PointCloud<PointT>::ConstPtr &features) 00080 { 00081 // Do not limit the number of dimensions used in the tree 00082 typename pcl::CustomPointRepresentation<PointT>::Ptr cpr (new pcl::CustomPointRepresentation<PointT> (INT_MAX, 0)); 00083 tree_.reset (new pcl::KdTreeFLANN<PointT>); 00084 tree_->setPointRepresentation (cpr); 00085 tree_->setInputCloud (features); 00086 } 00087 00092 void setTrainingLabelIndicesAndLUT (const std::vector<std::string> &classes, const std::vector<int> &labels_idx) 00093 { 00094 // TODO check if min/max index is inside classes? 
00095 classes_ = classes; 00096 labels_idx_ = labels_idx; 00097 } 00098 00105 void setTrainingLabels (const std::vector<std::string> &labels) 00106 { 00107 // Create a list of unique labels 00108 classes_ = labels; 00109 std::sort (classes_.begin(), classes_.end()); 00110 classes_.erase (std::unique (classes_.begin(), classes_.end()), classes_.end()); 00111 00112 // Save the mapping from labels to indices in the class list 00113 std::map<std::string, int> label2idx; 00114 for (std::vector<std::string>::const_iterator it = classes_.begin (); it != classes_.end (); it++) 00115 label2idx[*it] = it - classes_.begin (); 00116 00117 // Create a list holding the class index of each label 00118 labels_idx_.reserve (labels.size ()); 00119 BOOST_FOREACH (std::string s, labels) 00120 labels_idx_.push_back (label2idx[s]); 00121 // for (std::vector<std::string>::const_iterator it = labels.begin (); it != labels.end (); it++) 00122 // labels_idx_.push_back (label2idx[*it]); 00123 } 00124 00130 bool loadTrainingFeatures(std::string file_name, std::string labels_file_name) 00131 { 00132 typename pcl::PointCloud<PointT>::Ptr cloud (new pcl::PointCloud<PointT>); 00133 if (pcl::io::loadPCDFile (file_name.c_str (), *cloud) != 0) 00134 return false; 00135 std::vector<std::string> labels; 00136 std::ifstream f (labels_file_name.c_str ()); 00137 std::string label; 00138 while (getline (f, label)) 00139 if (label.size () > 0) 00140 labels.push_back(label); 00141 if (labels.size () != cloud->points.size ()) 00142 return false; 00143 setTrainingFeatures (cloud); 00144 setTrainingLabels (labels); 00145 return true; 00146 } 00147 00153 bool saveTrainingFeatures(std::string file_name, std::string labels_file_name) 00154 { 00155 typename pcl::PointCloud<PointT>::ConstPtr training_features = tree_->getInputCloud (); 00156 if (labels_idx_.size () == training_features->points.size ()) 00157 { 00158 if (pcl::io::savePCDFile (file_name.c_str (), *training_features) != 0) 00159 return false; 00160 std::ofstream f (labels_file_name.c_str ()); 00161 BOOST_FOREACH (int i, labels_idx_) 00162 f << classes_[i] << "\n"; 00163 return true; 00164 } 00165 return false; 00166 } 00167 00175 ResultPtr classify (const PointT &p_q, double radius, float gaussian_param, int max_nn = INT_MAX) 00176 { 00177 std::vector<int> k_indices; 00178 std::vector<float> k_sqr_distances; 00179 getSimilarExemplars (p_q, radius, k_indices, k_sqr_distances, max_nn); 00180 return getGaussianBestScores (gaussian_param, k_indices, k_sqr_distances); 00181 } 00182 00191 int getKNearestExemplars (const PointT &p_q, int k, std::vector<int> &k_indices, std::vector<float> &k_sqr_distances) 00192 { 00193 k_indices.resize (k); 00194 k_sqr_distances.resize (k); 00195 return tree_->nearestKSearch (p_q, k, k_indices, k_sqr_distances); 00196 } 00197 00206 int getSimilarExemplars (const PointT &p_q, double radius, std::vector<int> &k_indices, 00207 std::vector<float> &k_sqr_distances, int max_nn = INT_MAX) 00208 { 00209 return tree_->radiusSearch (p_q, radius, k_indices, k_sqr_distances, max_nn); 00210 } 00211 00217 boost::shared_ptr<std::vector<float> > getSmallestSquaredDistances (std::vector<int> &k_indices, std::vector<float> &k_sqr_distances) 00218 { 00219 // Reserve space for distances 00220 boost::shared_ptr<std::vector<float> > sqr_distances (new std::vector<float> (classes_.size (), FLT_MAX)); 00221 00222 // Select square distance to each class 00223 for (std::vector<int>::const_iterator i = k_indices.begin (); i != k_indices.end (); ++i) 00224 if 
((*sqr_distances)[labels_idx_[*i]] > k_sqr_distances[i - k_indices.begin ()]) 00225 (*sqr_distances)[labels_idx_[*i]] = k_sqr_distances[i - k_indices.begin ()]; 00226 return sqr_distances; 00227 } 00228 00235 ResultPtr getLinearBestScores (std::vector<int> &k_indices, std::vector<float> &k_sqr_distances) 00236 { 00237 // Get smallest squared distances and transform them to a score for each class 00238 boost::shared_ptr<std::vector<float> > sqr_distances = getSmallestSquaredDistances (k_indices, k_sqr_distances); 00239 00240 // Transform distances to scores 00241 double sum_dist = 0; 00242 boost::shared_ptr<std::pair<std::vector<std::string>, std::vector<float> > > result (new std::pair<std::vector<std::string>, std::vector<float> > ()); 00243 result->first.reserve (classes_.size ()); 00244 result->second.reserve (classes_.size ()); 00245 for (std::vector<float>::const_iterator it = sqr_distances->begin (); it != sqr_distances->end (); ++it) 00246 if (*it != FLT_MAX) 00247 { 00248 result->first.push_back (classes_[it - sqr_distances->begin ()]); 00249 result->second.push_back (sqrt (*it)); 00250 sum_dist += result->second.back (); 00251 } 00252 for (std::vector<float>::iterator it = result->second.begin (); it != result->second.end (); ++it) 00253 *it = 1 - *it/sum_dist; 00254 00255 // Return label/score list pair 00256 return result; 00257 } 00258 00265 ResultPtr getGaussianBestScores (float gaussian_param, std::vector<int> &k_indices, std::vector<float> &k_sqr_distances) 00266 { 00267 // Get smallest squared distances and transform them to a score for each class 00268 boost::shared_ptr<std::vector<float> > sqr_distances = getSmallestSquaredDistances (k_indices, k_sqr_distances); 00269 00270 // Transform distances to scores 00271 boost::shared_ptr<std::pair<std::vector<std::string>, std::vector<float> > > result (new std::pair<std::vector<std::string>, std::vector<float> > ()); 00272 result->first.reserve (classes_.size ()); 00273 result->second.reserve (classes_.size ()); 00274 for (std::vector<float>::const_iterator it = sqr_distances->begin (); it != sqr_distances->end (); ++it) 00275 if (*it != FLT_MAX) 00276 { 00277 result->first.push_back (classes_[it - sqr_distances->begin ()]); 00278 // TODO leave it squared, and relate param to sigma... 00279 result->second.push_back (exp (-sqrt(*it)/gaussian_param)); 00280 } 00281 00282 // Return label/score list pair 00283 return result; 00284 } 00285 }; 00286 } 00287 00288 #endif /* NNCLASSIFICATION_H_ */
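For reference, a minimal usage sketch of this class might look like the code below. The feature type (pcl::VFHSignature308), the file names, the include path for this header, and the radius/gaussian_param values are assumptions chosen for illustration; they are not prescribed by the header itself.

// Minimal usage sketch of pcl::NNClassification (assumptions noted above).
#include <iostream>
#include <pcl/point_types.h>        // pcl::VFHSignature308 (assumed feature type)
#include "nn_classification.h"      // adjust the include path to your install

int main ()
{
  pcl::NNClassification<pcl::VFHSignature308> classifier;

  // Training data: a PCD of feature vectors plus one text label per point
  // (hypothetical file names).
  if (!classifier.loadTrainingFeatures ("training_features.pcd", "training_labels.txt"))
  {
    std::cerr << "Could not load training data" << std::endl;
    return 1;
  }

  // Query descriptor; in practice fill it with a feature computed from a real cloud.
  pcl::VFHSignature308 query;
  for (int d = 0; d < 308; ++d)
    query.histogram[d] = 0.0f;

  // Radius search in feature space followed by Gaussian score weighting;
  // both parameter values are placeholders to be tuned for the data at hand.
  double radius = 300.0;
  float gaussian_param = 50.0f;
  pcl::NNClassification<pcl::VFHSignature308>::ResultPtr result =
      classifier.classify (query, radius, gaussian_param);

  // One score per class that had at least one neighbor within the radius
  for (size_t i = 0; i < result->first.size (); ++i)
    std::cout << result->first[i] << ": " << result->second[i] << std::endl;

  return 0;
}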