#include <opencv2/stitching/detail/matchers.hpp>
#include <opencv2/stitching/detail/motion_estimators.hpp>

// ...

std::vector<cv::detail::ImageFeatures> image_features;
std::vector<cv::detail::MatchesInfo> pairwise_matches;
std::vector<cv::detail::CameraParams> transforms;
std::vector<int> good_indices;

// stitching pipeline components: feature finder, pairwise matcher,
// affine estimator and bundle adjuster
cv::Ptr<cv::detail::FeaturesFinder> finder = chooseFeatureFinder(feature);
// (a sketch of chooseFeatureFinder() follows after this function)
cv::Ptr<cv::detail::FeaturesMatcher> matcher =
    cv::makePtr<cv::detail::AffineBestOf2NearestMatcher>();
cv::Ptr<cv::detail::Estimator> estimator =
    cv::makePtr<cv::detail::AffineBasedEstimator>();
cv::Ptr<cv::detail::BundleAdjusterBase> adjuster =
    cv::makePtr<cv::detail::BundleAdjusterAffinePartial>();
// find features in each grid image
image_features.reserve(images_.size());
for (const cv::Mat& image : images_) {
  image_features.emplace_back();
  // ...
  (*finder)(image, image_features.back());
  // ...
}
finder->collectGarbage();

// ...

// match features between all pairs of grids
(*matcher)(image_features, pairwise_matches);
matcher->collectGarbage();

// ...

// keep only the largest subset of grids that match each other with the
// requested confidence
good_indices = cv::detail::leaveBiggestComponent(
    image_features, pairwise_matches, static_cast<float>(confidence));
ROS_DEBUG("calculating transforms in global reference frame");
// estimate initial transforms from the pairwise matches
if (!(*estimator)(image_features, pairwise_matches, transforms)) {
  // ...
}

// ...

// the bundle adjuster only accepts float transforms
for (auto& transform : transforms) {
  transform.R.convertTo(transform.R, CV_32F);
}

ROS_DEBUG("optimizing global transforms");
adjuster->setConfThresh(confidence);
if (!(*adjuster)(image_features, pairwise_matches, transforms)) {
  ROS_WARN("Bundle adjusting failed. Could not estimate transforms.");
  // ...
}
// store the refined transform of each grid that made it into the biggest
// connected component; i walks the estimated transforms, j the original grids
size_t i = 0;
for (auto& j : good_indices) {
  // ...
  transforms[i].R.convertTo(transforms_[static_cast<size_t>(j)], CV_64F);
  ++i;
}
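// A minimal sketch of what the chooseFeatureFinder() helper used above and
// declared below might look like -- an assumption, not the listed
// implementation. Only FeatureType::AKAZE appears in this listing; the ORB
// enum value is assumed, and both finder classes come from the OpenCV 3.x
// stitching module, where the cv::detail::FeaturesFinder hierarchy exists.
cv::Ptr<cv::detail::FeaturesFinder> chooseFeatureFinder(FeatureType type)
{
  switch (type) {
    case FeatureType::AKAZE:
      return cv::makePtr<cv::detail::AKAZEFeaturesFinder>();
    case FeatureType::ORB:  // assumed enum value
      return cv::makePtr<cv::detail::OrbFeaturesFinder>();
  }
  return {};
}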
// warp every grid into the common reference frame using its estimated
// transform and remember where it lands (its ROI); see the warp() sketch
// after this function
std::vector<cv::Mat> imgs_warped;
imgs_warped.reserve(images_.size());
std::vector<cv::Rect> rois;
// ...

for (size_t i = 0; i < images_.size(); ++i) {
  // ...
  imgs_warped.emplace_back();
  // ...
}

// ...

if (imgs_warped.empty()) {
  // ...
}

// ...

// stitch the warped grids into one occupancy grid
nav_msgs::OccupancyGrid::Ptr result;
// ... (the compositor is constructed in an elided line)
result = compositor.compose(imgs_warped, rois);

// ...

// propagate the resolution of the first input grid to the merged grid
result->info.resolution = grids_[0]->info.resolution;
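// Illustrative sketch of what a warp() helper with the signature declared
// below (cv::Rect warp(const cv::Mat& grid, const cv::Mat& transform,
// cv::Mat& warped_grid)) could do -- an assumption, not the listed
// implementation: apply the affine transform to the grid and report the
// bounding box of where the warped grid ends up.
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <vector>

cv::Rect warpSketch(const cv::Mat& grid, const cv::Mat& transform,
                    cv::Mat& warped_grid)
{
  // assume a CV_64F affine transform; use its top 2x3 block
  cv::Mat affine = transform.rowRange(0, 2);

  // bounding box of the warped grid corners
  std::vector<cv::Point2f> corners = {
      {0.f, 0.f},
      {static_cast<float>(grid.cols), 0.f},
      {static_cast<float>(grid.cols), static_cast<float>(grid.rows)},
      {0.f, static_cast<float>(grid.rows)}};
  std::vector<cv::Point2f> warped_corners;
  cv::transform(corners, warped_corners, affine);
  cv::Rect roi = cv::boundingRect(warped_corners);

  // shift so the warped grid starts at (0, 0) of its own image, then warp
  cv::Mat shifted = affine.clone();
  shifted.at<double>(0, 2) -= roi.x;
  shifted.at<double>(1, 2) -= roi.y;
  cv::warpAffine(grid, warped_grid, shifted, roi.size(), cv::INTER_NEAREST);

  return roi;
}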
// convert the estimated affine transforms (cv::Mat) to ROS transform messages
std::vector<geometry_msgs::Transform> result;
// ...
for (auto& transform : transforms_) {
  if (transform.empty()) {
    // no transform was estimated for this grid; keep a default-constructed
    // entry so indices still line up with the input grids
    result.emplace_back();
    continue;
  }

  // ...

  geometry_msgs::Transform ros_transform;
  ros_transform.translation.x = transform.at<double>(0, 2);
  ros_transform.translation.y = transform.at<double>(1, 2);
  ros_transform.translation.z = 0.;

  // the rotation is purely planar, so the quaternion is a rotation about the
  // z axis built from a = cos(theta) and b = sin(theta)
  double a = transform.at<double>(0, 0);
  double b = transform.at<double>(1, 0);
  ros_transform.rotation.w = std::sqrt(2. + 2. * a) * 0.5;
  ros_transform.rotation.x = 0.;
  ros_transform.rotation.y = 0.;
  ros_transform.rotation.z = std::copysign(std::sqrt(2. - 2. * a) * 0.5, b);

  result.push_back(ros_transform);
}
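// Standalone check of the quaternion formula used above (an illustrative
// sketch, not part of the pipeline). For a planar rotation by theta the
// matrix entries are a = cos(theta), b = sin(theta), and the z-axis
// quaternion is (w, z) = (cos(theta/2), sin(theta/2)); the half-angle
// identities cos(t/2) = sqrt((1 + cos t) / 2) and
// |sin(t/2)| = sqrt((1 - cos t) / 2) give exactly the expressions above,
// with copysign picking the sign of sin(theta/2) from b.
#include <cassert>
#include <cmath>
#include <cstdio>

int main()
{
  for (double theta = -3.0; theta <= 3.0; theta += 0.1) {
    double a = std::cos(theta);
    double b = std::sin(theta);
    double w = std::sqrt(2. + 2. * a) * 0.5;
    double z = std::copysign(std::sqrt(2. - 2. * a) * 0.5, b);
    assert(std::abs(w - std::cos(theta / 2.)) < 1e-9);
    assert(std::abs(z - std::sin(theta / 2.)) < 1e-9);
  }
  std::puts("half-angle identities hold for the planar quaternion");
  return 0;
}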
// Members and helpers referenced by the fragments above:
std::vector<cv::Mat> transforms_;
std::vector<cv::Mat> images_;
std::vector<nav_msgs::OccupancyGrid::ConstPtr> grids_;

bool estimateTransforms(FeatureType feature = FeatureType::AKAZE,
                        double confidence = 1.0);
std::vector<geometry_msgs::Transform> getTransforms() const;
nav_msgs::OccupancyGrid::Ptr composeGrids();
cv::Rect warp(const cv::Mat& grid, const cv::Mat& transform,
              cv::Mat& warped_grid);
nav_msgs::OccupancyGrid::Ptr compose(const std::vector<cv::Mat>& grids,
                                     const std::vector<cv::Rect>& rois);
static cv::Ptr<cv::detail::FeaturesFinder> chooseFeatureFinder(FeatureType type);
static void writeDebugMatchingInfo(
    const std::vector<cv::Mat>& images,
    const std::vector<cv::detail::ImageFeatures>& image_features,
    const std::vector<cv::detail::MatchesInfo>& pairwise_matches);
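// Hypothetical usage sketch tying the calls above together: estimate the
// transforms, compose the merged grid, then read back the per-grid
// transforms. The class name MergingPipeline, the combine_grids namespace,
// the header path and the way the input grids are fed in beforehand are all
// assumptions; only estimateTransforms(), composeGrids() and getTransforms()
// are declared in this listing.
#include <combine_grids/merging_pipeline.h>  // assumed header path
#include <ros/ros.h>

void mergeOnce(combine_grids::MergingPipeline& pipeline)
{
  // the input occupancy grids are assumed to have been loaded into the
  // pipeline (grids_ / images_) before this point
  if (!pipeline.estimateTransforms(combine_grids::FeatureType::AKAZE,
                                   /* confidence */ 1.0)) {
    ROS_WARN("could not estimate transforms between grids");
    return;
  }

  nav_msgs::OccupancyGrid::Ptr merged = pipeline.composeGrids();
  if (!merged) {
    return;  // nothing could be warped and composed
  }

  // per-grid transforms into the merged frame, as ROS messages
  std::vector<geometry_msgs::Transform> transforms = pipeline.getTransforms();
  ROS_INFO("merged %zu grids", transforms.size());
}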