Point Cloud Library (PCL)
1.7.0
/*
 * Software License Agreement (BSD License)
 *
 *  Point Cloud Library (PCL) - www.pointclouds.org
 *  Copyright (c) 2012 Aitor Aldoma, Federico Tombari
 *
 *  All rights reserved.
 *
 *  Redistribution and use in source and binary forms, with or without
 *  modification, are permitted provided that the following conditions
 *  are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of Willow Garage, Inc. nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 *  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 *  FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
 *  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 *  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 *  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 *  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 *  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 *  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 *  POSSIBILITY OF SUCH DAMAGE.
 */

#include <pcl/recognition/hv/hv_go.h>
#include <pcl/common/time.h>
#include <pcl/point_types.h>
#include <algorithm>
#include <cmath>
#include <limits>
#include <map>
#include <numeric>
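
// Region-growing flavour of Euclidean clustering used to segment the scene into
// smooth patches: a neighbor joins the growing cluster only if the angle between
// its normal and the current seed normal is below eps_angle, and points whose
// curvature exceeds curvature_threshold do not propagate the growth any further.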
template<typename PointT, typename NormalT>
inline void
extractEuclideanClustersSmooth (const typename pcl::PointCloud<PointT> &cloud,
                                const typename pcl::PointCloud<NormalT> &normals,
                                float tolerance,
                                const typename pcl::search::Search<PointT>::Ptr &tree,
                                std::vector<pcl::PointIndices> &clusters,
                                double eps_angle,
                                float curvature_threshold,
                                unsigned int min_pts_per_cluster,
                                unsigned int max_pts_per_cluster = (std::numeric_limits<int>::max) ())
{
  if (tree->getInputCloud ()->points.size () != cloud.points.size ())
  {
    PCL_ERROR ("[pcl::extractEuclideanClusters] Tree built for a different point cloud dataset\n");
    return;
  }
  if (cloud.points.size () != normals.points.size ())
  {
    PCL_ERROR ("[pcl::extractEuclideanClusters] Number of points in the input cloud differs from the number of normals!\n");
    return;
  }

  // Create a bool vector of processed point indices, and initialize it to false
  std::vector<bool> processed (cloud.points.size (), false);

  std::vector<int> nn_indices;
  std::vector<float> nn_distances;

  // Process all points in the cloud
  int size = static_cast<int> (cloud.points.size ());
  for (int i = 0; i < size; ++i)
  {
    if (processed[i])
      continue;

    // Grow a new cluster starting from point i
    std::vector<unsigned int> seed_queue;
    int sq_idx = 0;
    seed_queue.push_back (i);
    processed[i] = true;

    while (sq_idx < static_cast<int> (seed_queue.size ()))
    {
      // High-curvature points belong to the cluster but do not propagate it
      if (normals.points[seed_queue[sq_idx]].curvature > curvature_threshold)
      {
        sq_idx++;
        continue;
      }

      // Search for neighbors of the current seed point
      if (!tree->radiusSearch (seed_queue[sq_idx], tolerance, nn_indices, nn_distances))
      {
        sq_idx++;
        continue;
      }

      for (size_t j = 1; j < nn_indices.size (); ++j) // nn_indices[0] should be sq_idx
      {
        if (processed[nn_indices[j]]) // Has this point been processed before?
          continue;

        if (normals.points[nn_indices[j]].curvature > curvature_threshold)
          continue;

        // Dot product of the two unit normals, in [-1, 1]
        double dot_p = normals.points[seed_queue[sq_idx]].normal[0] * normals.points[nn_indices[j]].normal[0]
                     + normals.points[seed_queue[sq_idx]].normal[1] * normals.points[nn_indices[j]].normal[1]
                     + normals.points[seed_queue[sq_idx]].normal[2] * normals.points[nn_indices[j]].normal[2];

        if (fabs (acos (dot_p)) < eps_angle)
        {
          processed[nn_indices[j]] = true;
          seed_queue.push_back (nn_indices[j]);
        }
      }

      sq_idx++;
    }

    // If this queue is satisfactory, add it to the clusters
    if (seed_queue.size () >= min_pts_per_cluster && seed_queue.size () <= max_pts_per_cluster)
    {
      pcl::PointIndices r;
      r.indices.resize (seed_queue.size ());
      for (size_t j = 0; j < seed_queue.size (); ++j)
        r.indices[j] = seed_queue[j];

      std::sort (r.indices.begin (), r.indices.end ());
      r.indices.erase (std::unique (r.indices.begin (), r.indices.end ()), r.indices.end ());

      r.header = cloud.header;
      clusters.push_back (r); // We could avoid a copy by working directly in the vector
    }
  }
}
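
// Hypothesis selection is cast as an optimization over boolean activation vectors.
// The score being maximized (negated below, since mets minimizes) is
//
//   explained - bad - duplicity - unexplained - duplicity_cm - n_active
//
// rewarding scene points explained by active models, and penalizing model outliers,
// scene points explained more than once, clutter around explained points, voxels
// occupied by several complete models, and the number of active hypotheses itself.
// evaluateSolution() is invoked by the annealer after hypothesis `changed` has been
// toggled, and updates the cached terms incrementally instead of rescanning the scene.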
template<typename ModelT, typename SceneT>
mets::gol_type
pcl::GlobalHypothesesVerification<ModelT, SceneT>::evaluateSolution (const std::vector<bool> & active, int changed)
{
  float sign = 1.f;
  // Update explained_by_RM_
  if (active[changed])
  {
    // The hypothesis has been activated
    updateExplainedVector (recognition_models_[changed]->explained_, recognition_models_[changed]->explained_distances_, explained_by_RM_,
                           explained_by_RM_distance_weighted, 1.f);
    updateUnexplainedVector (recognition_models_[changed]->unexplained_in_neighborhood, recognition_models_[changed]->unexplained_in_neighborhood_weights,
                             unexplained_by_RM_neighboorhods, recognition_models_[changed]->explained_, explained_by_RM_, 1.f);
    updateCMDuplicity (recognition_models_[changed]->complete_cloud_occupancy_indices_, complete_cloud_occupancy_by_RM_, 1.f);
  }
  else
  {
    // The hypothesis has been deactivated
    updateExplainedVector (recognition_models_[changed]->explained_, recognition_models_[changed]->explained_distances_, explained_by_RM_,
                           explained_by_RM_distance_weighted, -1.f);
    updateUnexplainedVector (recognition_models_[changed]->unexplained_in_neighborhood, recognition_models_[changed]->unexplained_in_neighborhood_weights,
                             unexplained_by_RM_neighboorhods, recognition_models_[changed]->explained_, explained_by_RM_, -1.f);
    updateCMDuplicity (recognition_models_[changed]->complete_cloud_occupancy_indices_, complete_cloud_occupancy_by_RM_, -1.f);
    sign = -1.f;
  }

  int duplicity = getDuplicity ();
  float good_info = getExplainedValue ();

  float unexplained_info = getPreviousUnexplainedValue ();
  float bad_info = static_cast<float> (getPreviousBadInfo ())
      + (recognition_models_[changed]->outliers_weight_ * static_cast<float> (recognition_models_[changed]->bad_information_)) * sign;

  setPreviousBadInfo (bad_info);

  int n_active_hyp = 0;
  for (size_t i = 0; i < active.size (); i++)
  {
    if (active[i])
      n_active_hyp++;
  }

  float duplicity_cm = static_cast<float> (getDuplicityCM ()) * w_occupied_multiple_cm_;
  // Return the dual of our maximization problem
  return static_cast<mets::gol_type> ((good_info - bad_info - static_cast<float> (duplicity)
      - unexplained_info - duplicity_cm - static_cast<float> (n_active_hyp)) * -1.f);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
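// initialize() prepares everything the optimization consumes: scene normals,
// NaN-free compacted clouds, an optional smooth segmentation of the scene for
// clutter reasoning, one RecognitionModel per surviving hypothesis, and the
// occupancy grid shared by all complete models.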
template<typename ModelT, typename SceneT>
void
pcl::GlobalHypothesesVerification<ModelT, SceneT>::initialize ()
{
  // Clear stuff
  recognition_models_.clear ();
  unexplained_by_RM_neighboorhods.clear ();
  explained_by_RM_distance_weighted.clear ();
  explained_by_RM_.clear ();
  mask_.clear ();
  indices_.clear ();
  complete_cloud_occupancy_by_RM_.clear ();

  // Initialize mask to false
  mask_.resize (complete_models_.size ());
  for (size_t i = 0; i < complete_models_.size (); i++)
    mask_[i] = false;

  indices_.resize (complete_models_.size ());

  // Estimate scene normals
  NormalEstimator_ n3d;
  scene_normals_.reset (new pcl::PointCloud<pcl::Normal> ());

  typename pcl::search::KdTree<SceneT>::Ptr normals_tree (new pcl::search::KdTree<SceneT>);
  normals_tree->setInputCloud (scene_cloud_downsampled_);

  n3d.setRadiusSearch (radius_normals_);
  n3d.setSearchMethod (normals_tree);
  n3d.setInputCloud (scene_cloud_downsampled_);
  n3d.compute (*scene_normals_);

  // Discard points with NaN normals, compacting both clouds in place
  int j = 0;
  for (size_t i = 0; i < scene_normals_->points.size (); ++i)
  {
    if (!pcl_isfinite (scene_normals_->points[i].normal_x) || !pcl_isfinite (scene_normals_->points[i].normal_y)
        || !pcl_isfinite (scene_normals_->points[i].normal_z))
      continue;

    scene_normals_->points[j] = scene_normals_->points[i];
    scene_cloud_downsampled_->points[j] = scene_cloud_downsampled_->points[i];
    j++;
  }

  scene_normals_->points.resize (j);
  scene_normals_->width = j;
  scene_normals_->height = 1;

  scene_cloud_downsampled_->points.resize (j);
  scene_cloud_downsampled_->width = j;
  scene_cloud_downsampled_->height = 1;

  explained_by_RM_.resize (scene_cloud_downsampled_->points.size (), 0);
  explained_by_RM_distance_weighted.resize (scene_cloud_downsampled_->points.size (), 0.f);
  unexplained_by_RM_neighboorhods.resize (scene_cloud_downsampled_->points.size (), 0.f);

  // Compute a smooth segmentation of the scene if detect_clutter_
  if (detect_clutter_)
  {
    // Initialize kd-tree for search
    scene_downsampled_tree_.reset (new pcl::search::KdTree<SceneT>);
    scene_downsampled_tree_->setInputCloud (scene_cloud_downsampled_);

    std::vector<pcl::PointIndices> clusters;
    double eps_angle_threshold = 0.2;
    int min_points = 20;
    float curvature_threshold = 0.045f;

    extractEuclideanClustersSmooth<SceneT, pcl::Normal> (*scene_cloud_downsampled_, *scene_normals_, inliers_threshold_ * 2.f, scene_downsampled_tree_,
                                                         clusters, eps_angle_threshold, curvature_threshold, min_points);

    // Encode the id of the segment each scene point belongs to in the intensity channel
    clusters_cloud_.reset (new pcl::PointCloud<pcl::PointXYZI>);
    clusters_cloud_->points.resize (scene_cloud_downsampled_->points.size ());
    clusters_cloud_->width = scene_cloud_downsampled_->width;
    clusters_cloud_->height = 1;

    for (size_t i = 0; i < scene_cloud_downsampled_->points.size (); i++)
    {
      pcl::PointXYZI p;
      p.getVector3fMap () = scene_cloud_downsampled_->points[i].getVector3fMap ();
      p.intensity = 0.f; // 0 marks points that belong to no cluster
      clusters_cloud_->points[i] = p;
    }

    float intens_incr = 100.f / static_cast<float> (clusters.size ());
    float intens = intens_incr;
    for (size_t i = 0; i < clusters.size (); i++)
    {
      for (size_t j = 0; j < clusters[i].indices.size (); j++)
        clusters_cloud_->points[clusters[i].indices[j]].intensity = intens;

      intens += intens_incr;
    }
  }
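
  // Each hypothesis is turned into a RecognitionModel by addModel(); hypotheses
  // whose voxelized cloud ends up empty are dropped, and indices_ keeps the
  // mapping from surviving models back to the original hypothesis positions.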
  // Compute cues
  {
    pcl::ScopeTime tcues ("Computing cues");
    recognition_models_.resize (complete_models_.size ());
    int valid = 0;
    for (int i = 0; i < static_cast<int> (complete_models_.size ()); i++)
    {
      // Create a recognition model for this hypothesis
      recognition_models_[valid].reset (new RecognitionModel ());
      if (addModel (visible_models_[i], complete_models_[i], recognition_models_[valid]))
      {
        indices_[valid] = i;
        valid++;
      }
    }

    recognition_models_.resize (valid);
    indices_.resize (valid);
  }

  // Compute the bounding box enclosing all complete models
  ModelT min_pt_all, max_pt_all;
  min_pt_all.x = min_pt_all.y = min_pt_all.z = std::numeric_limits<float>::max ();
  max_pt_all.x = max_pt_all.y = max_pt_all.z = (std::numeric_limits<float>::max () - 0.001f) * -1;

  for (size_t i = 0; i < recognition_models_.size (); i++)
  {
    ModelT min_pt, max_pt;
    pcl::getMinMax3D (*complete_models_[indices_[i]], min_pt, max_pt);
    if (min_pt.x < min_pt_all.x)
      min_pt_all.x = min_pt.x;
    if (min_pt.y < min_pt_all.y)
      min_pt_all.y = min_pt.y;
    if (min_pt.z < min_pt_all.z)
      min_pt_all.z = min_pt.z;
    if (max_pt.x > max_pt_all.x)
      max_pt_all.x = max_pt.x;
    if (max_pt.y > max_pt_all.y)
      max_pt_all.y = max_pt.y;
    if (max_pt.z > max_pt_all.z)
      max_pt_all.z = max_pt.z;
  }

  // Allocate an occupancy grid covering the bounding box
  int size_x, size_y, size_z;
  size_x = static_cast<int> (std::ceil (std::abs (max_pt_all.x - min_pt_all.x) / res_occupancy_grid_)) + 1;
  size_y = static_cast<int> (std::ceil (std::abs (max_pt_all.y - min_pt_all.y) / res_occupancy_grid_)) + 1;
  size_z = static_cast<int> (std::ceil (std::abs (max_pt_all.z - min_pt_all.z) / res_occupancy_grid_)) + 1;

  complete_cloud_occupancy_by_RM_.resize (size_x * size_y * size_z, 0);

  for (size_t i = 0; i < recognition_models_.size (); i++)
  {
    std::map<int, bool> banned;
    std::map<int, bool>::iterator banned_it;

    for (size_t j = 0; j < complete_models_[indices_[i]]->points.size (); j++)
    {
      // Voxel coordinates of this model point
      int pos_x, pos_y, pos_z;
      pos_x = static_cast<int> (std::floor ((complete_models_[indices_[i]]->points[j].x - min_pt_all.x) / res_occupancy_grid_));
      pos_y = static_cast<int> (std::floor ((complete_models_[indices_[i]]->points[j].y - min_pt_all.y) / res_occupancy_grid_));
      pos_z = static_cast<int> (std::floor ((complete_models_[indices_[i]]->points[j].z - min_pt_all.z) / res_occupancy_grid_));

      // The banned map makes sure each hypothesis increments a voxel at most once,
      // so the grid counts hypotheses (not points) claiming each voxel
      int idx = pos_z * size_x * size_y + pos_y * size_x + pos_x;
      banned_it = banned.find (idx);
      if (banned_it == banned.end ())
      {
        complete_cloud_occupancy_by_RM_[idx]++;
        recognition_models_[i]->complete_cloud_occupancy_indices_.push_back (idx);
        banned[idx] = true;
      }
    }
  }
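
  // Clutter cues look at the neighborhood of every explained scene point and are
  // computed independently per hypothesis, so the loop below is parallelized with OpenMP.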
  {
    pcl::ScopeTime tcues ("Computing clutter cues");
#pragma omp parallel for schedule(dynamic, 4) num_threads(omp_get_num_procs())
    for (int j = 0; j < static_cast<int> (recognition_models_.size ()); j++)
      computeClutterCue (recognition_models_[j]);
  }

  // A single connected component containing all hypotheses
  cc_.clear ();
  n_cc_ = 1;
  cc_.resize (n_cc_);
  for (size_t i = 0; i < recognition_models_.size (); i++)
    cc_[0].push_back (static_cast<int> (i));
}

template<typename ModelT, typename SceneT>
void
pcl::GlobalHypothesesVerification<ModelT, SceneT>::SAOptimize (std::vector<int> & cc_indices, std::vector<bool> & initial_solution)
{
  // Temporary copy of recognition_models_
  std::vector<boost::shared_ptr<RecognitionModel> > recognition_models_copy;
  recognition_models_copy = recognition_models_;

  recognition_models_.clear ();

  for (size_t j = 0; j < cc_indices.size (); j++)
    recognition_models_.push_back (recognition_models_copy[cc_indices[j]]);

  // Accumulate the cues of the initial (all-active) solution
  for (size_t j = 0; j < recognition_models_.size (); j++)
  {
    boost::shared_ptr<RecognitionModel> recog_model = recognition_models_[j];
    for (size_t i = 0; i < recog_model->explained_.size (); i++)
    {
      explained_by_RM_[recog_model->explained_[i]]++;
      explained_by_RM_distance_weighted[recog_model->explained_[i]] += recog_model->explained_distances_[i];
    }

    if (detect_clutter_)
    {
      for (size_t i = 0; i < recog_model->unexplained_in_neighborhood.size (); i++)
        unexplained_by_RM_neighboorhods[recog_model->unexplained_in_neighborhood[i]] += recog_model->unexplained_in_neighborhood_weights[i];
    }
  }

  int occupied_multiple = 0;
  for (size_t i = 0; i < complete_cloud_occupancy_by_RM_.size (); i++)
  {
    if (complete_cloud_occupancy_by_RM_[i] > 1)
      occupied_multiple += complete_cloud_occupancy_by_RM_[i];
  }

  setPreviousDuplicityCM (occupied_multiple);

  // Do the optimization: define the SAModel, the initial solution activates all models
  int duplicity;
  float good_information_ = getTotalExplainedInformation (explained_by_RM_, explained_by_RM_distance_weighted, &duplicity);
  float bad_information_ = 0;
  float unexplained_in_neighboorhod = getUnexplainedInformationInNeighborhood (unexplained_by_RM_neighboorhods, explained_by_RM_);

  for (size_t i = 0; i < initial_solution.size (); i++)
  {
    if (initial_solution[i])
      bad_information_ += recognition_models_[i]->outliers_weight_ * static_cast<float> (recognition_models_[i]->bad_information_);
  }

  setPreviousExplainedValue (good_information_);
  setPreviousDuplicity (duplicity);
  setPreviousBadInfo (bad_information_);
  setPreviousUnexplainedValue (unexplained_in_neighboorhod);
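
  // Seed the annealer with the cost of the all-active solution: the same
  // expression evaluateSolution() maintains incrementally, negated because mets
  // minimizes. The search stops after max_iterations_ moves without improvement,
  // with candidate moves proposed by move_manager (defined in hv_go.h).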
  SAModel model;
  model.cost_ = static_cast<mets::gol_type> ((good_information_ - bad_information_
      - static_cast<float> (duplicity)
      - static_cast<float> (occupied_multiple) * w_occupied_multiple_cm_
      - static_cast<float> (recognition_models_.size ())
      - unexplained_in_neighboorhod) * -1.f);

  model.setSolution (initial_solution);
  model.setOptimizer (this);
  SAModel best (model);

  move_manager neigh (static_cast<int> (cc_indices.size ()));

  mets::best_ever_solution best_recorder (best);
  mets::noimprove_termination_criteria noimprove (max_iterations_);
  mets::linear_cooling linear_cooling;
  mets::simulated_annealing<move_manager> sa (model, best_recorder, neigh, noimprove, linear_cooling, initial_temp_, 1e-7, 2);
  sa.setApplyAndEvaluate (true);

  {
    pcl::ScopeTime t ("SA search...");
    sa.search ();
  }

  // Copy the best solution found back into initial_solution
  best_seen_ = static_cast<const SAModel&> (best_recorder.best_seen ());
  for (size_t i = 0; i < best_seen_.solution_.size (); i++)
    initial_solution[i] = best_seen_.solution_[i];

  recognition_models_ = recognition_models_copy;
}

///////////////////////////////////////////////////////////////////////////////////////////////////
template<typename ModelT, typename SceneT>
void
pcl::GlobalHypothesesVerification<ModelT, SceneT>::verify ()
{
  initialize ();

  // For each connected component, find the optimal solution
  for (int c = 0; c < n_cc_; c++)
  {
    // TODO: Check for the trivial case...
    // TODO: Check the number of hypotheses as well and use exhaustive enumeration if smaller than 10
    std::vector<bool> subsolution (cc_[c].size (), true);
    SAOptimize (cc_[c], subsolution);
    for (size_t i = 0; i < subsolution.size (); i++)
      mask_[indices_[cc_[c][i]]] = subsolution[i];
  }
}
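
// addModel() builds the cues for a single hypothesis: it voxelizes the visible
// and complete model clouds, estimates normals, and classifies every model point
// either as an outlier (no scene point within inliers_threshold_) or as explaining
// one or more scene points. Returns false if the voxelized cloud has no valid points.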
template<typename ModelT, typename SceneT>
bool
pcl::GlobalHypothesesVerification<ModelT, SceneT>::addModel (typename pcl::PointCloud<ModelT>::ConstPtr & model,
                                                             typename pcl::PointCloud<ModelT>::ConstPtr & complete_model,
                                                             boost::shared_ptr<RecognitionModel> & recog_model)
{
  // Voxelize the model clouds
  recog_model->cloud_.reset (new pcl::PointCloud<ModelT> ());
  recog_model->complete_cloud_.reset (new pcl::PointCloud<ModelT> ());

  float size_model = resolution_;
  pcl::VoxelGrid<ModelT> voxel_grid;
  voxel_grid.setInputCloud (model);
  voxel_grid.setLeafSize (size_model, size_model, size_model);
  voxel_grid.filter (*(recog_model->cloud_));

  pcl::VoxelGrid<ModelT> voxel_grid2;
  voxel_grid2.setInputCloud (complete_model);
  voxel_grid2.setLeafSize (size_model, size_model, size_model);
  voxel_grid2.filter (*(recog_model->complete_cloud_));

  {
    // Discard NaN points, compacting the cloud in place
    int j = 0;
    for (size_t i = 0; i < recog_model->cloud_->points.size (); ++i)
    {
      if (!pcl_isfinite (recog_model->cloud_->points[i].x) || !pcl_isfinite (recog_model->cloud_->points[i].y)
          || !pcl_isfinite (recog_model->cloud_->points[i].z))
        continue;

      recog_model->cloud_->points[j] = recog_model->cloud_->points[i];
      j++;
    }

    recog_model->cloud_->points.resize (j);
    recog_model->cloud_->width = j;
    recog_model->cloud_->height = 1;
  }

  if (recog_model->cloud_->points.size () <= 0)
  {
    PCL_WARN ("The model cloud has no points..\n");
    return false;
  }

  // Compute normals unless given (for now, do it always...)
  typename pcl::search::KdTree<ModelT>::Ptr normals_tree (new pcl::search::KdTree<ModelT>);
  typedef typename pcl::NormalEstimation<ModelT, pcl::Normal> NormalEstimator_;
  NormalEstimator_ n3d;
  recog_model->normals_.reset (new pcl::PointCloud<pcl::Normal> ());
  normals_tree->setInputCloud (recog_model->cloud_);
  n3d.setRadiusSearch (radius_normals_);
  n3d.setSearchMethod (normals_tree);
  n3d.setInputCloud (recog_model->cloud_);
  n3d.compute (*(recog_model->normals_));

  // Discard points with NaN normals, compacting both clouds in place
  int j = 0;
  for (size_t i = 0; i < recog_model->normals_->points.size (); ++i)
  {
    if (!pcl_isfinite (recog_model->normals_->points[i].normal_x) || !pcl_isfinite (recog_model->normals_->points[i].normal_y)
        || !pcl_isfinite (recog_model->normals_->points[i].normal_z))
      continue;

    recog_model->normals_->points[j] = recog_model->normals_->points[i];
    recog_model->cloud_->points[j] = recog_model->cloud_->points[i];
    j++;
  }

  recog_model->normals_->points.resize (j);
  recog_model->normals_->width = j;
  recog_model->normals_->height = 1;

  recog_model->cloud_->points.resize (j);
  recog_model->cloud_->width = j;
  recog_model->cloud_->height = 1;

  std::vector<int> explained_indices;
  std::vector<float> outliers_weight;
  std::vector<float> explained_indices_distances;
  std::vector<float> unexplained_indices_weights;

  std::vector<int> nn_indices;
  std::vector<float> nn_distances;

  // Which scene point i is explained by which model points j_k at distance d_k
  std::map<int, boost::shared_ptr<std::vector<std::pair<int, float> > > > model_explains_scene_points;
  std::map<int, boost::shared_ptr<std::vector<std::pair<int, float> > > >::iterator it;

  outliers_weight.resize (recog_model->cloud_->points.size ());
  recog_model->outlier_indices_.resize (recog_model->cloud_->points.size ());

  size_t o = 0;
  for (size_t i = 0; i < recog_model->cloud_->points.size (); i++)
  {
    if (!scene_downsampled_tree_->radiusSearch (recog_model->cloud_->points[i], inliers_threshold_, nn_indices, nn_distances, std::numeric_limits<int>::max ()))
    {
      // Model point with no scene point nearby: an outlier
      outliers_weight[o] = regularizer_;
      recog_model->outlier_indices_[o] = static_cast<int> (i);
      o++;
    }
    else
    {
      for (size_t k = 0; k < nn_distances.size (); k++)
      {
        std::pair<int, float> pair = std::make_pair (static_cast<int> (i), nn_distances[k]); // i indexes a model point, paired with its distance
        it = model_explains_scene_points.find (nn_indices[k]);
        if (it == model_explains_scene_points.end ())
        {
          boost::shared_ptr<std::vector<std::pair<int, float> > > vec (new std::vector<std::pair<int, float> > ());
          vec->push_back (pair);
          model_explains_scene_points[nn_indices[k]] = vec;
        }
        else
        {
          it->second->push_back (pair);
        }
      }
    }
  }

  outliers_weight.resize (o);
  recog_model->outlier_indices_.resize (o);

  if (outliers_weight.empty ())
    recog_model->outliers_weight_ = 1.f;
  else
    recog_model->outliers_weight_ = std::accumulate (outliers_weight.begin (), outliers_weight.end (), 0.f) / static_cast<float> (outliers_weight.size ());

  pcl::IndicesPtr indices_scene (new std::vector<int>);

  // Go through the map and, when several model points explain a scene point, keep only the closest one
  int p = 0;
  for (it = model_explains_scene_points.begin (); it != model_explains_scene_points.end (); it++, p++)
  {
    size_t closest = 0;
    float min_d = std::numeric_limits<float>::max ();
    for (size_t i = 0; i < it->second->size (); i++)
    {
      if (it->second->at (i).second < min_d)
      {
        min_d = it->second->at (i).second;
        closest = i;
      }
    }

    float d = it->second->at (closest).second;
    float d_weight = -(d * d / (inliers_threshold_)) + 1;
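    // The weight decays with the correspondence distance, so closer matches
    // explain their scene point more strongly; it is further modulated by the
    // normal agreement term below.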
    // it->first is the index of the scene point
    // Use the normals to weight the inliers
    Eigen::Vector3f scene_p_normal = scene_normals_->points[it->first].getNormalVector3fMap ();
    Eigen::Vector3f model_p_normal = recog_model->normals_->points[it->second->at (closest).first].getNormalVector3fMap ();
    float dotp = scene_p_normal.dot (model_p_normal); // [-1,1] from antiparallel through perpendicular to parallel

    if (dotp < 0.f)
      dotp = 0.f;

    explained_indices.push_back (it->first);
    explained_indices_distances.push_back (d_weight * dotp);
  }

  recog_model->bad_information_ = static_cast<int> (recog_model->outlier_indices_.size ());
  recog_model->explained_ = explained_indices;
  recog_model->explained_distances_ = explained_indices_distances;

  return true;
}

template<typename ModelT, typename SceneT>
void
pcl::GlobalHypothesesVerification<ModelT, SceneT>::computeClutterCue (boost::shared_ptr<RecognitionModel> & recog_model)
{
  if (detect_clutter_)
  {
    float rn_sqr = radius_neighborhood_GO_ * radius_neighborhood_GO_;
    std::vector<int> nn_indices;
    std::vector<float> nn_distances;

    // first: index of a scene point in the neighborhood, second: index into explained_ of the point whose search produced it
    std::vector<std::pair<int, int> > neighborhood_indices;
    for (int i = 0; i < static_cast<int> (recog_model->explained_.size ()); i++)
    {
      if (scene_downsampled_tree_->radiusSearch (scene_cloud_downsampled_->points[recog_model->explained_[i]], radius_neighborhood_GO_, nn_indices,
                                                 nn_distances, std::numeric_limits<int>::max ()))
      {
        for (size_t k = 0; k < nn_distances.size (); k++)
        {
          if (nn_indices[k] != i)
            neighborhood_indices.push_back (std::make_pair (nn_indices[k], i));
        }
      }
    }

    // Sort the neighborhood indices by scene point id
    std::sort (neighborhood_indices.begin (), neighborhood_indices.end (),
               boost::bind (&std::pair<int, int>::first, _1) < boost::bind (&std::pair<int, int>::first, _2));

    // Erase duplicated unexplained points
    neighborhood_indices.erase (
        std::unique (neighborhood_indices.begin (), neighborhood_indices.end (),
                     boost::bind (&std::pair<int, int>::first, _1) == boost::bind (&std::pair<int, int>::first, _2)), neighborhood_indices.end ());

    // Sort the explained points as well
    std::vector<int> exp_idces (recog_model->explained_);
    std::sort (exp_idces.begin (), exp_idces.end ());

    recog_model->unexplained_in_neighborhood.resize (neighborhood_indices.size ());
    recog_model->unexplained_in_neighborhood_weights.resize (neighborhood_indices.size ());
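
    // With both lists sorted by scene index, a single linear merge separates the
    // neighborhood points into those already explained by this hypothesis
    // (skipped) and genuine clutter candidates (weighted below).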
    size_t p = 0;
    size_t j = 0;
    for (size_t i = 0; i < neighborhood_indices.size (); i++)
    {
      if ((j < exp_idces.size ()) && (neighborhood_indices[i].first == exp_idces[j]))
      {
        // This index is explained by the hypothesis, so ignore it and advance j
        j++;
      }
      else
      {
        // neighborhood_indices[i].first < exp_idces[j]: an unexplained point in the neighborhood
        recog_model->unexplained_in_neighborhood[p] = neighborhood_indices[i].first;

        if (clusters_cloud_->points[recog_model->explained_[neighborhood_indices[i].second]].intensity != 0.f
            && (clusters_cloud_->points[recog_model->explained_[neighborhood_indices[i].second]].intensity
                == clusters_cloud_->points[neighborhood_indices[i].first].intensity))
        {
          // Both points belong to the same smooth segment: penalize with the full clutter regularizer
          recog_model->unexplained_in_neighborhood_weights[p] = clutter_regularizer_;
        }
        else
        {
          // neighborhood_indices[i].first is the index of the scene point; .second points to the explained scene point that caused it.
          // Weight this clutter point by its distance to the explained scene point causing it
          float d = static_cast<float> (pow (
              (scene_cloud_downsampled_->points[recog_model->explained_[neighborhood_indices[i].second]].getVector3fMap ()
                  - scene_cloud_downsampled_->points[neighborhood_indices[i].first].getVector3fMap ()).norm (), 2));
          float d_weight = -(d / rn_sqr) + 1; // Points that are close have a strong weight

          // Use the normals to weight the clutter points
          Eigen::Vector3f scene_p_normal = scene_normals_->points[neighborhood_indices[i].first].getNormalVector3fMap ();
          Eigen::Vector3f model_p_normal = scene_normals_->points[recog_model->explained_[neighborhood_indices[i].second]].getNormalVector3fMap ();
          float dotp = scene_p_normal.dot (model_p_normal); // [-1,1] from antiparallel through perpendicular to parallel

          if (dotp < 0)
            dotp = 0.f;

          recog_model->unexplained_in_neighborhood_weights[p] = d_weight * dotp;
        }
        p++;
      }
    }

    recog_model->unexplained_in_neighborhood_weights.resize (p);
    recog_model->unexplained_in_neighborhood.resize (p);
  }
}

#define PCL_INSTANTIATE_GoHV(T1,T2) template class PCL_EXPORTS pcl::GlobalHypothesesVerification<T1,T2>;
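
// A minimal usage sketch (compiled out): how this verifier is typically driven,
// assuming a scene cloud `scene` and a vector of aligned hypothesis clouds
// `instances`. Setter names follow the PCL 1.7-era headers; check hv_go.h and
// hypotheses_verification.h for the exact API available in your build.
#if 0
#include <pcl/recognition/hv/hv_go.h>

void
verifyHypotheses (const pcl::PointCloud<pcl::PointXYZ>::Ptr & scene,
                  std::vector<pcl::PointCloud<pcl::PointXYZ>::ConstPtr> & instances,
                  std::vector<bool> & mask)
{
  pcl::GlobalHypothesesVerification<pcl::PointXYZ, pcl::PointXYZ> go_hv;
  go_hv.setSceneCloud (scene);
  go_hv.addModels (instances, true);   // true: derive visible models through occlusion reasoning
  go_hv.setInlierThreshold (0.005f);   // inliers_threshold_ used by addModel()
  go_hv.setRadiusClutter (0.04f);      // radius_neighborhood_GO_ used by computeClutterCue()
  go_hv.setRegularizer (3.f);          // outlier weight regularizer_
  go_hv.setClutterRegularizer (5.f);   // clutter_regularizer_ for same-segment clutter
  go_hv.setDetectClutter (true);
  go_hv.setRadiusNormals (0.05f);      // radius_normals_ for normal estimation

  go_hv.verify ();                     // initialize() + SAOptimize() per connected component
  go_hv.getMask (mask);                // mask[i] == true: hypothesis i was accepted
}
#endif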