#include <boost/foreach.hpp>
#include <boost/range/algorithm_ext/iota.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/range/irange.hpp>
#include <robot_model_services/robot_model/RobotModel.hpp>
#include "asr_msgs/AsrAttributedPointCloud.h"
#include "asr_msgs/AsrAttributedPoint.h"

// ...

NextBestViewCalculator(const UnitSphereSamplerPtr &unitSphereSamplerPtr = UnitSphereSamplerPtr(),
                       const MapHelperPtr &mapHelperPtr = MapHelperPtr(),
                       const SpaceSamplerPtr &spaceSamplerPtr = SpaceSamplerPtr(),
                       const robot_model_services::RobotModelPtr &robotModelPtr = robot_model_services::RobotModelPtr(),
                       const CameraModelFilterPtr &cameraModelFilterPtr = CameraModelFilterPtr(),
                       const RatingModulePtr &ratingModulePtr = RatingModulePtr())
    : mUnitSphereSamplerPtr(unitSphereSamplerPtr),
      mMapHelperPtr(mapHelperPtr),
      mSpaceSamplerPtr(spaceSamplerPtr),
      mRobotModelPtr(robotModelPtr),
      mCameraModelFilterPtr(cameraModelFilterPtr),
      mRatingModulePtr(ratingModulePtr),
      mNumberOfThreads(boost::thread::hardware_concurrency()),
      mThreadCameraModels(mNumberOfThreads),
      mThreadRatingModules(mNumberOfThreads) {
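    // The per-thread vectors mThreadCameraModels and mThreadRatingModules are sized to
    // mNumberOfThreads so that each rating thread can later work on its own CameraModelFilter and
    // RatingModule instance (presumably to avoid locking the shared modules during rating).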
    // ...
}

// calculateNextBestView(): estimate the next best view starting from the current camera viewport
auto begin = std::chrono::high_resolution_clock::now();
// ...
mDebugHelperPtr->write("Calculate discrete set of view orientations on unit sphere",
                       /* ... */);
// ...
bool success = this->doIteration(currentCameraViewport, feasibleOrientationsCollectionPtr, resultViewport);

auto finish = std::chrono::high_resolution_clock::now();
ROS_INFO_STREAM("Iteration took " << std::chrono::duration<float>(finish - begin).count() << " seconds.");

// initializeRobotState(): make the given camera viewport the robot's current state
robot_model_services::RobotStatePtr currentState =
        mRobotModelPtr->calculateRobotState(currentCameraViewport.getPosition(), currentCameraViewport.getSimpleQuaternion());

mRobotModelPtr->setCurrentRobotState(currentState);
mRatingModulePtr->setRobotState(currentState);
for (int i : boost::irange(0, mNumberOfThreads)) {
    mThreadRatingModules[i]->setRobotState(currentState);
}
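// Both the shared rating module and every per-thread rating module receive the new robot state,
// so the parallel rating threads presumably evaluate movement costs from the same start pose.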

// getFeasibleSamplePoints(): keep only space samples that have object hypotheses nearby
for (std::size_t index = 0; index < sampledSpacePointCloudPtr->size(); index++) {
    SamplePoint &spaceSamplePoint = sampledSpacePointCloudPtr->at(index);
    // ...
    resultIndicesPtr->push_back(index);
}

// getFeasibleViewports(): analogous filtering for whole viewport samples
for (std::size_t index = 0; index < sampleViewportsPtr->size(); index++) {
    ViewportPoint &spaceViewport = sampleViewportsPtr->at(index);
    // ...
    resultIndicesPtr->push_back(index);
}

// getFeasibleHypothesis(): kd-tree radius search for object hypotheses around a sample point
double radius = mCameraModelFilterPtr->getFarClippingPlane();
// ...
int k = mKdTreePtr->radiusSearch(comparablePoint, radius, *childIndicesPtr, dismissDistances);
// ...
resultIndicesPtr = childIndicesPtr;
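// Presumably the far clipping plane is used as the search radius so that only hypotheses which
// could still fall into a camera frustum placed at this sample point are returned.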

// both rateViewports() overloads delegate to rateViewportsInternal()
return rateViewportsInternal(sampleNextBestViewportsPtr, currentCameraViewport, ratedNextBestViewportsPtr, resultViewport, objectTypeSetIsKnown);
// ...
return rateViewportsInternal(sampleNextBestViewports, currentCameraViewport, ratedNextBestViewportsPtr, resultViewport, objectTypeSetIsKnown);

// rateViewportsInternal(): distribute the rating work over mNumberOfThreads threads
boost::thread_group threadGroup;
for (int i : boost::irange(0, mNumberOfThreads)) {
    threadGroup.add_thread(new boost::thread(&NextBestViewCalculator::ratingThread, this, i, boost::ref(mutex),  // spawn call assumed; only the arguments below appear in the listing
            boost::ref(sampleNextBestViewports),
            boost::ref(currentCameraViewport),
            boost::ref(ratedNextBestViewports),
            objectTypeSetIsKnown));
}
threadGroup.join_all();
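// Each thread rates a contiguous slice of the sampled viewports with its own RatingModule and
// CameraModelFilter (mThreadRatingModules / mThreadCameraModels); the shared mutex presumably only
// guards the pushes into ratedNextBestViewports.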

mDebugHelperPtr->write("Sorted list of all viewports (each best for pos & orient combi) in this iteration step.",
                       /* ... */);
return mRatingModulePtr->getBestViewport(ratedNextBestViewports, resultViewport);

// ratingThread(): each worker rates its share of the sampled viewports
void ratingThread(int threadId, boost::mutex &mutex,
                  const ViewportPointCloudPtr &sampleNextBestViewports,
                  const ViewportPoint &currentCameraViewport,
                  const ViewportPointCloudPtr &ratedNextBestViewports,
                  bool objectTypeSetIsKnown) {
    unsigned int nViewportSamples = sampleNextBestViewports->size();
    double startFactor = (double) threadId / mNumberOfThreads;
    double endFactor = (double) (threadId + 1) / mNumberOfThreads;   // assumed; the defining line is elided in the listing
    unsigned int startIdx = (int) (startFactor * nViewportSamples);
    unsigned int endIdx = (int) (endFactor * nViewportSamples);
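    // Worked example (illustrative numbers): with nViewportSamples = 10 and mNumberOfThreads = 4,
    // thread 0 rates indices [0, 2), thread 1 [2, 5), thread 2 [5, 7) and thread 3 [7, 10), so
    // every sample is rated exactly once.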
    for (int i : boost::irange(startIdx, endIdx)) {
        ViewportPoint fullViewportPoint = sampleNextBestViewports->at(i);
        // skip samples whose frustum contains no object hypothesis
        if (!this->doFrustumCulling(mThreadCameraModels[threadId], fullViewportPoint)) {
            // ...
        }
        mDebugHelperPtr->write("Getting viewport with optimal object constellation for given position & orientation combination.",
                               /* ... */);
        if (objectTypeSetIsKnown) {
            // ...
        }
        // ...
        ratedNextBestViewports->push_back(fullViewportPoint);
    }
}

// rateSingleViewportOptimizeObjectTypes(): let the rating module pick the best object subset
mDebugHelperPtr->write("Getting viewport with optimal object constellation for given position & orientation combination.",
                       /* ... */);
return ratingModulePtr->setBestScoreContainer(currentCameraViewport, fullViewportPoint);

// rateSingleViewportFixedObjectTypes(): rate with the viewport's object_type_set kept fixed
ratingModulePtr->resetCache();
return ratingModulePtr->setSingleScoreContainer(currentCameraViewport, fullViewportPoint);

// doIteration(): repeat doIterationStep() with a shrinking contractor until the rating converges
int iterationStep = 0;
// ...
float currentBestRating = 0;
// ...
if (!this->doIterationStep(currentCameraViewport, currentBestViewport,                 // call start assumed; only the arguments below appear in the listing
                           sampledOrientationsPtr, 1.0 / pow(2.0, iterationStep),
                           intermediateResultViewport, iterationStep)) {
    // ...
}
float rating = mRatingModulePtr->getRating(intermediateResultViewport.score);
mDebugHelperPtr->write("THIS IS THE BEST VIEWPORT IN THE GIVEN ITERATION STEP.",
                       /* ... */);
// ...
if (std::abs(rating - currentBestRating) <= this->getEpsilon() || iterationStep >= mMaxIterationSteps) {
    resultViewport = intermediateResultViewport;
    ROS_INFO_STREAM("Next-best-view estimation SUCCEEDED. Took " << iterationStep << " iterations");
    mDebugHelperPtr->write("THIS IS THE BEST VIEWPORT FOR ALL ITERATION STEPS.",
                           /* ... */);
    // ...
}
mDebugHelperPtr->write(std::stringstream() << "IterationStep: " << iterationStep,
                       /* ... */);
// ...
currentBestViewport = intermediateResultViewport;
currentBestRating = rating;
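// The contractor 1.0 / pow(2.0, iterationStep) shrinks the sampled space by half each pass,
// presumably refining the search around the current best viewport; the loop stops once the rating
// improves by no more than getEpsilon() or mMaxIterationSteps is reached (see the check above).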

// doIterationStep(): sample viewports for the current contractor, rate them and pick a result
// ...
if (sampledSpacePointCloudPtr->size() == 1 && this->getEpsilon() < contractor) {
    mDebugHelperPtr->write("No RViz visualization for this iteration step, since no new next-best-view found for that resolution.",
                           /* ... */);
    bool success = doIterationStep(currentCameraViewport, currentBestViewport, sampledOrientationsPtr,
                                   contractor * .5, resultViewport, iterationStep);
    // ...
}
// ...
if (!rateViewports(sampleNextBestViewports, currentCameraViewport, ratedNextBestViewportsPtr)) {
    // ...
}
// sort ascending by rating so the best viewport ends up at the back
auto sortFunction = [this](const ViewportPoint &a, const ViewportPoint &b) {            // lambda wrapper assumed; only its body appears in the listing
    return mRatingModulePtr->compareViewports(a, b);
};
std::sort(ratedNextBestViewportsPtr->begin(), ratedNextBestViewportsPtr->end(), sortFunction);

if (mRequireMinUtility) {
    bool foundUtility = false;
    // walk from the best-rated viewport downwards and take the first one with enough utility
    BOOST_REVERSE_FOREACH (ViewportPoint &ratedViewport, *ratedNextBestViewportsPtr) {
        if (ratedViewport.score->getUnweightedUnnormalizedUtility() > mMinUtility) {
            resultViewport = ratedViewport;
            // ...
        }
    }
    // ...
    resultViewport = ratedNextBestViewportsPtr->back();        // fallback if no viewport clears mMinUtility
} else {
    resultViewport = ratedNextBestViewportsPtr->back();
}
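// Because the list is sorted with the best rating last, the reverse iteration above prefers the
// highest-rated viewport whose unweighted, unnormalized utility still exceeds mMinUtility; without
// the minimum-utility requirement the best-rated viewport is returned directly.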

mVisHelperPtr->triggerIterationVisualization(iterationStep, sampledOrientationsPtr, resultViewport,
                                             sampledSpacePointCloudPtr, mSpaceSamplerPtr);

// setHeight(): force all space samples to a fixed z coordinate
for (unsigned int i = 0; i < pointCloudPtr->size(); i++) {
    pointCloudPtr->at(i).z = height;
}

// the doFrustumCulling() overloads delegate to the variant taking an explicit CameraModelFilterPtr
return doFrustumCulling(mCameraModelFilterPtr, position, orientation, indices, viewportPoint);

cameraModelFilterPtr->setIndices(resultViewportPoint.child_indices);
// ...
cameraModelFilterPtr->filter(frustumIndicesPtr);
// ...
if (frustumIndicesPtr->size() == 0) {
    // ...
}

// updateFromExternalViewportPointList(): apply a list of already-visited viewports to the point cloud
mDebugHelperPtr->write(std::stringstream() << "Number of viewports: " << viewportPointList.size(),
                       DebugHelper::CALCULATION);
for (unsigned int i = 0; i < viewportPointList.size(); i++) {
    // ...
    mDebugHelperPtr->write(std::stringstream() << "THIS IS VIEWPORT NR. " << i+1 << " IN THE LIST OF EXTERNAL VIEWPORTS.",
                           /* ... */);
    // ...
    mDebugHelperPtr->write(std::stringstream() << "Viewpoint TAKEN",
                           /* ... */);
    // ...
}

// updateObjectPointCloud(): deactivate object normals covered by the chosen viewport
mDebugHelperPtr->write(std::stringstream() << "Number of active normals before update: " << getNumberActiveNormals(),
                       DebugHelper::CALCULATION);
unsigned int deactivatedNormals = mHypothesisUpdaterPtr->update(objectTypeSetPtr, viewportPoint);

mDebugHelperPtr->write(std::stringstream() << "Deactivated normals in viewport: " << deactivatedNormals,
                       DebugHelper::CALCULATION);
mDebugHelperPtr->write(std::stringstream() << "Number of active normals after update: " << getNumberActiveNormals(),
                       DebugHelper::CALCULATION);
return deactivatedNormals;

// setPointCloudFromMessage(): convert an AsrAttributedPointCloud message into the internal object cloud
BOOST_FOREACH(asr_msgs::AsrAttributedPoint element, msg.elements) {
    // ...
    pointCloudPoint.color.r = 0;
    pointCloudPoint.color.g = 255;
    pointCloudPoint.color.b = 0;
    pointCloudPoint.type = element.type;
    pointCloudPoint.identifier = element.identifier;
    // ...
    if (mObjectTypeSetPtr->find(element.type) == mObjectTypeSetPtr->end())
        mObjectTypeSetPtr->insert(element.type);
    // ...
    if (!mEnableCropBoxFiltering) {
        // ...
        pointCloudPoint.color = colorByID;

        if (mEnableIntermediateObjectWeighting) {
            // ... (query the world model for an intermediate object weight)
            if (responsePtr_Intermediate) {
                // ...
            }
            // error path when the service call fails:
            ROS_ERROR("Invalid object name %s or world model service call failed. Point Cloud not set!",
                      pointCloudPoint.type.c_str());
            // ...
        }
    }
    // ...
    originalPointCloudPtr->push_back(pointCloudPoint);
}

if (mEnableCropBoxFiltering) {
    // ...
    mCropBoxFilterPtr->setInputCloud(originalPointCloudPtr);
    mCropBoxFilterPtr->filter(filteredObjectIndices);
    mDebugHelperPtr->write("setPointCloudFromMessage::Filtering point cloud finished.",
                           /* ... */);
    mVisHelperPtr->triggerCropBoxVisualization(mCropBoxFilterPtr->getCropBoxWrapperPtrList());
    // ...
    for (ObjectPoint &pointCloudPoint : *outputPointCloudPtr) {
        // ...
    }
} else {
    outputPointCloudPtr = originalPointCloudPtr;
}

if (outputPointCloudPtr->size() > 0) {
    // activate every point of the final cloud: indices 0 .. size-1
    boost::range::iota(boost::iterator_range<Indices::iterator>(activeIndicesPtr->begin(), activeIndicesPtr->end()), 0);
    // ...
} else {
    mDebugHelperPtr->write("setPointCloudFromMessage::output point cloud is empty.",
                           /* ... */);
}

// generateSampleViewports(): combine feasible space samples with the orientation samples
ROS_INFO_STREAM("sampledSpacePointCloudPtr->size: " << sampledSpacePointCloudPtr->size());
// ...
return sampleNextBestViewports;

// generateOrientationSamples(): collect the feasible view orientations from the unit-sphere sampler
feasibleOrientationsCollectionPtr->push_back(q);
// ...
return feasibleOrientationsCollectionPtr;

// generateSpaceSamples()
SamplePointCloudPtr sampledSpacePointCloudPtr = mSpaceSamplerPtr->getSampledSpacePointCloud(spaceSampleStartVector, contractor);
// ...
this->setHeight(sampledSpacePointCloudPtr, pointCloudHeight);

// combineSamples(): build one viewport per (space sample, orientation) pair
for (SamplePoint &samplePoint : (*sampledSpacePointCloudPtr)) {
    SimpleVector3 samplePointCoords = samplePoint.getSimpleVector3();
    IndicesPtr samplePointChildIndices = samplePoint.child_indices;

    mDebugHelperPtr->write("Iterating over all orientations for a given robot position.",
                           /* ... */);
    // ...
    ViewportPoint sampleViewport(samplePointCoords, orientation);
    // ...
    sampleNextBestViewports->push_back(sampleViewport);
    // ...
}
return sampleNextBestViewports;
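// Since one viewport is created per (space sample, orientation sample) pair, the number of
// candidate viewports grows with the product of the two sample set sizes.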

// setNormals(): fetch normal vectors and the mesh resource via the object metadata service
if (responsePtr_ObjectData) {
    // ...
    int normalVectorCount = 0;
    for (geometry_msgs::Point point : responsePtr_ObjectData->normal_vectors) {
        // ...
        normal = rotationMatrix * normal;
        // ...
    }
    // ...
}
// error path when the service response is missing:
ROS_ERROR("Invalid object name '%s' in point cloud or object_database node not started. Point Cloud not set!",
          pointCloudPoint.type.c_str());
// ...
objectsResources[pointCloudPoint.type] = responsePtr_ObjectData->object_mesh_resource;

// setNormalsInCropBoxMode(): take normals from the crop box configuration instead of the object database
auto cropBoxPtrList = mCropBoxFilterPtr->getCropBoxWrapperPtrList();
int normalVectorCount = 0;
bool foundNormals = false;
bool isInCropbox = false;
// ... (for each crop box wrapper:)
CropBoxPtr cropBoxPtr = cropBoxWrapper->getCropBox();
Eigen::Vector4f max = cropBoxPtr->getMax();
Eigen::Vector3f translation = cropBoxPtr->getTranslation();
// ...
if (!cropBoxWrapper->getCropBoxNormalsList()->empty()) {
    for (SimpleVector3 normal : *cropBoxWrapper->getCropBoxNormalsList()) {
        // ...
    }
}
// ...
if (isInCropbox && !foundNormals) {
    // ...
}
// ...
objectsResources[pointCloudPoint.type] = "package://asr_next_best_view/rsc/sphere.dae";

// isPointInCropbox(): check each coordinate against the translated crop box extent
return isPointInRange(position(0,0), translation(0,0), max(0,0)) &&
       /* ... y and z components ... */;

// isPointInRange()
return point >= min && point <= (min + length);
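// Example (illustrative numbers): isPointInRange(2.0f, 1.0f, 3.0f) is true because 2.0 lies in
// [1.0, 1.0 + 3.0]; the crop box check above applies this test per axis, offset by the box translation.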

// getMeshPathByName(): look up the mesh resource registered for an object type
if (this->objectsResources.find(objectType) != this->objectsResources.end()) {
    return this->objectsResources[objectType];
}

// setPointCloudPtr(): hand the new cloud to every module, including the per-thread copies
mPointCloudPtr = pointCloudPtr;
// ...
mKdTreePtr->setInputCloud(mPointCloudPtr);
mRatingModulePtr->setInputCloud(mPointCloudPtr);
mCameraModelFilterPtr->setInputCloud(mPointCloudPtr);
for (int i : boost::irange(0, mNumberOfThreads)) {
    mThreadRatingModules[i]->setInputCloud(mPointCloudPtr);
    mThreadCameraModels[i]->setInputCloud(mPointCloudPtr);
}
mSpaceSamplerPtr->setInputCloud(mPointCloudPtr);

// setActiveIndices()
mActiveIndicesPtr = activeIndicesPtr;
// getNumberActiveNormals(): sum over all object points
result += objPoint.active_normal_vectors->size();
// getNumberTotalNormals(type, identifier) and getNumberActiveNormals(type, identifier) skip non-matching points:
if (objPoint.identifier.compare(identifier) != 0 || objPoint.type.compare(type) != 0)
    continue;
result += objPoint.normal_vectors->size();
// ...
if (objPoint.identifier.compare(identifier) != 0 || objPoint.type.compare(type) != 0)
    continue;
result += objPoint.active_normal_vectors->size();

// getTypeAndIds(): collect each (type, identifier) pair exactly once
std::vector<std::pair<std::string, std::string>> result;
std::set<std::string> foundObjectTypeAndIds;
// ... (for every point in the cloud:)
std::string currentObjTypeAndId = objPoint.type + objPoint.identifier;
if (foundObjectTypeAndIds.find(currentObjTypeAndId) == foundObjectTypeAndIds.end()) {
    foundObjectTypeAndIds.insert(currentObjTypeAndId);
    result.push_back(std::make_pair(objPoint.type, objPoint.identifier));
}

// removeObjects(): erase all points matching the given type and identifier
auto it = mPointCloudPtr->begin();
while (it != mPointCloudPtr->end()) {
    // ...
    if (objPoint.identifier.compare(identifier) != 0 || objPoint.type.compare(type) != 0) {
        ++it;                                           // keep points that do not match (increment assumed)
    } else {
        it = mPointCloudPtr->erase(it);
    }
}
if (mPointCloudPtr->empty()) {
    // ...
}
// ...
boost::range::iota(boost::iterator_range<Indices::iterator>(activeIndicesPtr->begin(), activeIndicesPtr->end()), 0);

// simple setters (each taken from its own method in the listing)
mUnitSphereSamplerPtr = unitSphereSamplerPtr;
mSpaceSamplerPtr = spaceSamplerPtr;
mRobotModelPtr = robotModelPtr;
mCameraModelFilterPtr = cameraModelFilterPtr;
mRatingModulePtr = ratingModulePtr;
mMapHelperPtr = mapHelperPtr;
mHypothesisUpdaterPtr = hypothesisUpdaterPtr;
mMaxIterationSteps = maxIterationSteps;

// setRatingModuleAbstractFactoryPtr(): recreate the per-thread rating modules from the new factory
mRatingModuleAbstractFactoryPtr = ratingModuleAbstractFactoryPtr;
for (int i : boost::irange(0, mNumberOfThreads)) {
    if (mThreadRatingModules[i]) {
        mThreadRatingModules[i].reset();
    }
    mThreadRatingModules[i] = mRatingModuleAbstractFactoryPtr->createRatingModule();
}

// setCameraModelFilterAbstractFactoryPtr(): likewise for the per-thread camera model filters
mCameraModelFilterAbstractFactoryPtr = cameraModelFilterAbstractFactoryPtr;
for (int i : boost::irange(0, mNumberOfThreads)) {
    if (mThreadCameraModels[i]) {
        mThreadCameraModels[i].reset();
    }
    mThreadCameraModels[i] = mCameraModelFilterAbstractFactoryPtr->createCameraModelFilter();
}

// setNumberOfThreads(): negative values fall back to the hardware thread count
if (value < 0) {
    mNumberOfThreads = boost::thread::hardware_concurrency();
} else {
    mNumberOfThreads = value;
}

mThreadCameraModels.clear();
mThreadRatingModules.clear();
mThreadCameraModels.resize(mNumberOfThreads);
mThreadRatingModules.resize(mNumberOfThreads);
for (int i : boost::irange(0, mNumberOfThreads)) {
    if (mThreadRatingModules[i]) {
        mThreadRatingModules[i].reset();
    }
    mThreadRatingModules[i] = mRatingModuleAbstractFactoryPtr->createRatingModule();
    mThreadRatingModules[i]->setInputCloud(mPointCloudPtr);

    if (mThreadCameraModels[i]) {
        mThreadCameraModels[i].reset();
    }
    mThreadCameraModels[i] = mCameraModelFilterAbstractFactoryPtr->createCameraModelFilter();
    mThreadCameraModels[i]->setInputCloud(mPointCloudPtr);
}
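// Usage note: calling setNumberOfThreads(-1), for example, therefore sizes both per-thread vectors
// to boost::thread::hardware_concurrency() and re-seeds every freshly created module with the
// current point cloud.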

// setMinUtility() / setRequireMinUtility()
mMinUtility = minUtility;
mRequireMinUtility = requireMinUtility;
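
A minimal usage sketch (illustrative only, assuming the sampler, rating, camera and robot-model modules passed to the constructor have already been created and configured elsewhere; error handling omitted):

    NextBestViewCalculator calculator(unitSphereSamplerPtr, mapHelperPtr, spaceSamplerPtr,
                                      robotModelPtr, cameraModelFilterPtr, ratingModulePtr);
    calculator.setHypothesisUpdater(hypothesisUpdaterPtr);

    asr_msgs::AsrAttributedPointCloud msg;               // filled with object hypotheses from recognition
    if (calculator.setPointCloudFromMessage(msg)) {
        ViewportPoint currentCameraViewport;             // current camera pose (position + orientation)
        ViewportPoint nextBestViewport;
        if (calculator.calculateNextBestView(currentCameraViewport, nextBestViewport)) {
            // after driving the sensor to the new viewport, deactivate the covered hypotheses:
            calculator.updateObjectPointCloud(nextBestViewport.object_type_set, nextBestViewport);
        }
    }

Declarations referenced in the listing: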
bool rateViewports(const ViewportPointCloudPtr &sampleNextBestViewportsPtr, const ViewportPoint &currentCameraViewport, ViewportPoint &resultViewport, bool objectTypeSetIsKnown=false)
rates given viewports, which must contain child_indices
bool getFeasibleHypothesis(SimpleVector3 samplePoint, IndicesPtr &resultIndicesPtr)
getFeasibleHypothesis finds object hypothesis near samplePoint
ObjectPointCloudPtr getPointCloudPtr()
void getFeasibleSamplePoints(const SamplePointCloudPtr &sampledSpacePointCloudPtr, IndicesPtr &resultIndicesPtr)
SimpleVector3 getSimpleVector3() const
void setHypothesisUpdater(const HypothesisUpdaterPtr &hypothesisUpdaterPtr)
ObjectPointCloudPtr mPointCloudPtr
unsigned int getNumberTotalNormals(std::string type, std::string identifier)
IndicesPtr getActiveIndices()
VisualizationHelperPtr mVisHelperPtr
pcl::PointCloud< ObjectPoint > ObjectPointCloud
void setEnableIntermediateObjectWeighting(const bool mEnableIntermediateObjectWeighting)
void ratingThread(int threadId, boost::mutex &mutex, const ViewportPointCloudPtr &sampleNextBestViewports, const ViewportPoint &currentCameraViewport, const ViewportPointCloudPtr &ratedNextBestViewports, bool objectTypeSetIsKnown)
ratingThread
bool doFrustumCulling(const SimpleVector3 &position, const SimpleQuaternion &orientation, const IndicesPtr &indices, ViewportPoint &viewportPoint)
creates a new camera viewport with the given data
HypothesisUpdaterPtr mHypothesisUpdaterPtr
void setHeight(SamplePointCloudPtr pointCloudPtr, double height)
void setCameraModelFilter(const CameraModelFilterPtr &cameraModelFilterPtr)
bool setNormalsInCropBoxMode(const ObjectPoint &pointCloudPoint)
bool rateViewportsInternal(const ViewportPointCloudPtr &sampleNextBestViewports, const ViewportPoint &currentCameraViewport, ViewportPointCloudPtr &ratedNextBestViewports, ViewportPoint &resultViewport, bool objectTypeSetIsKnown)
rates viewports and returns a vector of rated viewports and the best rated viewport
CameraModelFilterAbstractFactoryPtr mCameraModelFilterAbstractFactoryPtr
bool isPointInCropbox(const SimpleVector3 &position, const Eigen::Vector3f &translation, const Eigen::Vector4f &max) const
unsigned int getNumberActiveNormals(std::string type, std::string identifier)
std::string getMeshPathByName(std::string objectType)
bool doIteration(const ViewportPoint &currentCameraViewport, const SimpleQuaternionCollectionPtr &sampledOrientationsPtr, ViewportPoint &resultViewport)
bool setNormals(const ObjectPoint &pointCloudPoint)
SimpleVector3CollectionPtr normal_vectors
CameraModelFilterPtr mCameraModelFilterPtr
SpaceSamplerPtr mSpaceSamplerPtr
bool isPointInRange(float point, float min, float length) const
IntermediateObjectWeightResponse::Ptr IntermediateObjectWeightResponsePtr
void setCameraModelFilterAbstractFactoryPtr(const CameraModelFilterAbstractFactoryPtr &cameraModelFilterAbstractFactoryPtr)
setCameraModelFilterAbstractFactoryPtr used to generate camera models per thread. ...
ViewportPointCloud::Ptr ViewportPointCloudPtr
Eigen::Matrix< Precision, 3, 1 > SimpleVector3
void setMaxIterationSteps(int maxIterationSteps)
void initializeRobotState(const ViewportPoint &currentCameraViewport)
initializeRobotState initializes the robot state to the given viewport
bool doFrustumCulling(const CameraModelFilterPtr &cameraModelFilterPtr, ViewportPoint &resultViewportPoint)
creates a new camera viewport with the given data
bool setPointCloudFromMessage(const asr_msgs::AsrAttributedPointCloud &msg)
bool doFrustumCulling(const CameraModelFilterPtr &cameraModelFilterPtr, const SimpleVector3 &position, const SimpleQuaternion &orientation, const IndicesPtr &indices, ViewportPoint &resultViewport)
creates a new camera viewport with the given data
bool doIterationStep(const ViewportPoint &currentCameraViewport, const ViewportPoint &currentBestViewport, const SimpleQuaternionCollectionPtr &sampledOrientationsPtr, float contractor, ViewportPoint &resultViewport, int iterationStep)
std::vector< SimpleQuaternion, Eigen::aligned_allocator< SimpleQuaternion > > SimpleQuaternionCollection
HypothesisUpdaterPtr getHypothesisUpdater()
UnitSphereSamplerPtr mUnitSphereSamplerPtr
ViewportPointCloudPtr combineSamples(SamplePointCloudPtr sampledSpacePointCloudPtr, SimpleQuaternionCollectionPtr sampledOrientationsPtr)
combines space samples and orientation samples to viewport samples
CropBoxFilterPtr mCropBoxFilterPtr
std::map< std::string, std::string > objectsResources
bool mEnableIntermediateObjectWeighting
void setNumberOfThreads(int value)
sets the number of threads used to rate samples; negative numbers use boost::thread::hardware_concurrency()
std::vector< CameraModelFilterPtr > mThreadCameraModels
std::set< std::string > ObjectTypeSet
std::vector< int > Indices
ObjectPointCloudPtr point_cloud
double getMinUtility() const
unsigned int updateObjectPointCloud(const ObjectTypeSetPtr &objectTypeSetPtr, const ViewportPoint &viewportPoint)
Updates the point cloud under the assumption that the given viewport was chosen.
bool rateViewports(const ViewportPointCloudPtr &sampleNextBestViewports, const ViewportPoint &currentCameraViewport, ViewportPointCloudPtr &ratedNextBestViewportsPtr, bool objectTypeSetIsKnown=false)
rates given viewports, which must contain child_indices
SamplePointCloud::Ptr SamplePointCloudPtr
pcl::PointCloud< SamplePoint > SamplePointCloud
SimpleQuaternion getSimpleQuaternion() const
IntermediateObjectWeightResponsePtr getIntermediateObjectValue(std::string objectTypeName)
std::vector< float > SquaredDistances
The CropBoxFilter class, used to filter objects in crop boxes.
DefaultScoreContainerPtr score
SamplePointCloudPtr generateSpaceSamples(SimpleVector3 spaceSampleStartVector, double contractor, double pointCloudHeight)
generateSpaceSamples
ObjectPointCloudPtr child_point_cloud
boost::shared_ptr< RatingModule > RatingModulePtr
Definition for the shared pointer type of the class.
void setMinUtility(double minUtility)
void setSpaceSampler(const SpaceSamplerPtr &spaceSamplerPtr)
UnitSphereSamplerPtr getUnitSphereSampler()
bool rateSingleViewportFixedObjectTypes(const RatingModulePtr &ratingModulePtr, const ViewportPoint &currentCameraViewport, ViewportPoint &fullViewportPoint)
uses setSingleScoreContainer to rate fullViewportPoint with its existing object_type_set
this namespace contains all generally usable classes.
void setRatingModuleAbstractFactoryPtr(const RatingModuleAbstractFactoryPtr &ratingModuleAbstractFactoryPtr)
setRatingModuleAbstractFactoryPtr used to generate rating modules per thread.
boost::shared_ptr< CropBoxFilter > CropBoxFilterPtr
CropBoxFilterPtr typedef for consistency/less code to create boost::shared_ptr to CropBoxFilter...
ObjectMetaDataResponsePtr getObjectMetaData(std::string objectTypeName)
void setPointCloudPtr(const ObjectPointCloudPtr &pointCloudPtr)
void setMapHelper(const MapHelperPtr &mapHelperPtr)
pcl::KdTreeFLANN< ObjectPoint > KdTree
NextBestViewCalculator(const UnitSphereSamplerPtr &unitSphereSamplerPtr=UnitSphereSamplerPtr(), const MapHelperPtr &mapHelperPtr=MapHelperPtr(), const SpaceSamplerPtr &spaceSamplerPtr=SpaceSamplerPtr(), const robot_model_services::RobotModelPtr &robotModelPtr=robot_model_services::RobotModelPtr(), const CameraModelFilterPtr &cameraModelFilterPtr=CameraModelFilterPtr(), const RatingModulePtr &ratingModulePtr=RatingModulePtr())
pcl::PointCloud< ViewportPoint > ViewportPointCloud
robot_model_services::RobotModelPtr mRobotModelPtr
RatingModuleAbstractFactoryPtr mRatingModuleAbstractFactoryPtr
void setActiveIndices(const IndicesPtr &activeIndicesPtr)
sets the active indices.
void loadCropBoxListFromFile(const std::string mCropBoxListFilePath)
static boost::shared_ptr< DebugHelper > getInstance()
void updateFromExternalViewportPointList(const std::vector< ViewportPoint > &viewportPointList)
updates point cloud with external viewport point list
boost::shared_ptr< Indices > IndicesPtr
std_msgs::ColorRGBA color
bool filterObjectPointCloudByTypes(const ObjectTypeSetPtr &objectTypeSetPtr, ViewportPoint &viewportPoint)
filters all the objects in this viewport with one of the given types and puts them in a new viewport...
bool rateSingleViewportOptimizeObjectTypes(const RatingModulePtr &ratingModulePtr, const ViewportPoint &currentCameraViewport, ViewportPoint &fullViewportPoint)
uses setBestScoreContainer to optimize which objects should be recognized
void setUnitSphereSampler(const UnitSphereSamplerPtr &unitSphereSamplerPtr)
#define ROS_WARN_STREAM(args)
boost::shared_ptr< ObjectTypeSet > ObjectTypeSetPtr
SimpleVector3 getPosition() const
DefaultViewportPoint ViewportPoint
SpaceSamplerPtr getSpaceSampler()
boost::shared_ptr< VisualizationHelper > VisualizationHelperPtr
static std_msgs::ColorRGBA getMeshColor(std::string observedId)
MapHelperPtr mMapHelperPtr
CameraModelFilterAbstractFactoryPtr getCameraModelFilterAbstractFactoryPtr()
getCameraModelFilterAbstractFactoryPtr
robot_model_services::RobotModelPtr getRobotModel()
void setEnableCropBoxFiltering(const bool mEnableCropBoxFiltering)
SimpleVector3 getPosition() const
int getNumberOfThreads() const
getNumberOfThreads
RatingModuleAbstractFactoryPtr getRatingModuleAbstractFactoryPtr()
getRatingModuleAbstractFactoryPtr
void setRequireMinUtility(bool requireMinUtility)
std::vector< std::pair< std::string, std::string > > getTypeAndIds()
ObjectPointCloud::Ptr ObjectPointCloudPtr
bool removeObjects(std::string type, std::string identifier)
#define ROS_INFO_STREAM(args)
DebugHelperPtr mDebugHelperPtr
double intermediate_object_weight
void setRobotModel(const robot_model_services::RobotModelPtr &robotModelPtr)
boost::shared_ptr< CameraModelFilter > CameraModelFilterPtr
Definition for the shared pointer type of the class.
void setEpsilon(float epsilon)
ObjectTypeSetPtr mObjectTypeSetPtr
ViewportPointCloudPtr generateSampleViewports(SimpleVector3 spaceSampleStartVector, double contractor, double pointCloudHeight)
generateSampleViewports
std::vector< RatingModulePtr > mThreadRatingModules
RatingModulePtr mRatingModulePtr
boost::shared_ptr< UnitSphereSampler > UnitSphereSamplerPtr
Definition for the shared pointer type of the class.
bool getRequireMinUtility() const
RatingModulePtr getRatingModule()
MapHelperPtr getMapHelper()
bool calculateNextBestView(const ViewportPoint &currentCameraViewport, ViewportPoint &resultViewport)
IndicesPtr mActiveIndicesPtr
CameraModelFilterPtr getCameraModelFilter()
ObjectMetaDataResponse::Ptr ObjectMetaDataResponsePtr
Eigen::Quaternion< Precision > SimpleQuaternion
int getNumberActiveNormals()
Eigen::Matrix< Precision, 3, 3 > SimpleMatrix3
SimpleQuaternion getSimpleQuaternion() const
IndicesPtr active_normal_vectors
bool mEnableCropBoxFiltering
void getFeasibleViewports(const ViewportPointCloudPtr &sampleViewportsPtr, IndicesPtr &resultIndicesPtr)
getFeasibleViewports returns a vector of indices/sampleViewports which contain nearby (hypothesis) ...
ObjectTypeSetPtr object_type_set
void setRatingModule(const RatingModulePtr &ratingModulePtr)
boost::shared_ptr< SpaceSampler > SpaceSamplerPtr
Definition for the shared pointer type of the class.
boost::shared_ptr< MapHelper > MapHelperPtr
SimpleQuaternionCollectionPtr generateOrientationSamples()
generateOrientationSamples
bool doFrustumCulling(ViewportPoint &resultViewportPoint)
creates a new camera viewport with the given data