diff --git a/CMakeLists.txt b/CMakeLists.txt index d29ec62..5d39823 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -10,13 +10,12 @@ MESSAGE("Build type: " ${CMAKE_BUILD_TYPE}) add_definitions("-DENABLE_SSE") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -O3 -march=native ") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -O3 -march=native") - +# add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) set(CMAKE_BUILD_TYPE Release) -set(CMAKE_CXX_FLAGS "-std=c++11 -Wall -O2 ${SSE_FLAGS} -msse4") -set(CMAKE_CXX_FLAGS_RELEASE "-std=c++11 -O3 -fopenmp -pthread") - +set(CMAKE_CXX_FLAGS "-std=c++14 -Wall -O2 ${SSE_FLAGS} -msse4") +set(CMAKE_CXX_FLAGS_RELEASE "-std=c++14 -O3 -fopenmp -pthread") LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake_modules) @@ -32,11 +31,23 @@ find_package(Eigen3 REQUIRED) # pangolin find_package(Pangolin REQUIRED) +# G2O +find_package(G2O REQUIRED) +include_directories(${G2O_INCLUDE_DIRS}) + +# glog +find_package(Glog REQUIRED) +include_directories(${GLOG_INCLUDE_DIRS}) + # Sophus find_package(Sophus REQUIRED) # set(Sophus_LIBRARIES libSophus.so) # include_directories(${Sophus_INCLUDE_DIRS}) +# csparse +find_package(CSparse REQUIRED) +include_directories(${CSPARSE_INCLUDE_DIR}) + include_directories( ${PROJECT_SOURCE_DIR} ${PROJECT_SOURCE_DIR}/include @@ -57,13 +68,15 @@ src/feature.cpp src/map.cpp src/mappoint.cpp src/camera.cpp -src/orb.cpp ) target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBS} ${EIGEN3_LIBS} ${Pangolin_LIBRARIES} +g2o_core g2o_stuff g2o_types_sba g2o_solver_csparse g2o_csparse_extension +${CSPARSE_LIBRARY} +${GLOG_LIBRARIES} pthread ) diff --git a/README.md b/README.md new file mode 100644 index 0000000..b686ea7 --- /dev/null +++ b/README.md @@ -0,0 +1,8 @@ +# ECT_SLAM +ECT_SLAM is a real-time SLAM framework for **Monocular Visual Systems**. It features initialization, feature-based visual odometry and local pose graph optimization. This code runs on **Linux**, and is fully integrated with **ROS**. +## Reference +* **ORB-SLAM: A Versatile and Accurate Monocular SLAM System**, Mur-Artal, Raul, Jose Maria Martinez Montiel, Juan D. Tardos, IEEE Transactions on Robotics [PDF](http://webdiis.unizar.es/~raulmur/MurMontielTardosTRO15.pdf) + +* **VINS-Mono: A Robust and Versatile Monocular Visual-Inertial State Estimator**, Tong Qin, Peiliang Li, Zhenfei Yang, Shaojie Shen, IEEE Transactions on Robotics [PDF](https://ieeexplore.ieee.org/document/8421746/?arnumber=8421746&source=authoralert) + +* **视觉 SLAM 十四讲: 从理论到实践**, 高翔, 电子工业出版社, 2017 \ No newline at end of file diff --git a/build.sh b/build.sh index 3d3ede5..9395eff 100755 --- a/build.sh +++ b/build.sh @@ -3,5 +3,5 @@ echo "Building ECT_SLAM ..." rm -rf build mkdir build cd build -cmake .. -DCMAKE_BUILD_TYPE=Release +cmake .. -DCMAKE_BUILD_TYPE=Debug make -j8 diff --git a/build_ros.sh b/build_ros.sh index 8307d0f..b1c6acd 100755 --- a/build_ros.sh +++ b/build_ros.sh @@ -4,5 +4,5 @@ cd example/ROS/ECT_SLAM rm -rf build mkdir build cd build -cmake .. -DROS_BUILD_TYPE=Release -DPYTHON_EXECUTABLE=/usr/bin/python3 +cmake .. 
-DROS_BUILD_TYPE=Debug -DPYTHON_EXECUTABLE=/usr/bin/python3 make -j8 diff --git a/config/default.yaml b/config/default.yaml index bcb830c..7d223ad 100644 --- a/config/default.yaml +++ b/config/default.yaml @@ -9,9 +9,23 @@ camera.fy: 516.5 camera.cx: 325.1 camera.cy: 249.7 +# camera.fx: 493.0167 +# camera.fy: 491.55953 +# camera.cx: 317.97856 +# camera.cy: 242.392 + +is_equalize: 1 + num_features: 150 num_features_init: 50 num_features_tracking: 50 +ratio_for_keyframe: 0.50 +num_for_keyframe: 80 + # ros -# img_topic: "cam0/image_raw" +image_topic: "/camera/rgb/image_color" #"cam0/image_raw" + +# backend +chi2_th: 5.991 +num_iteration: 20 \ No newline at end of file diff --git a/example/ROS/ECT_SLAM/CMakeLists.txt b/example/ROS/ECT_SLAM/CMakeLists.txt index 5bae652..96ad11f 100644 --- a/example/ROS/ECT_SLAM/CMakeLists.txt +++ b/example/ROS/ECT_SLAM/CMakeLists.txt @@ -12,9 +12,10 @@ MESSAGE("Build type: " ${ROS_BUILD_TYPE}) set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -O3 -march=native ") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -O3 -march=native") + # Check C++11 or C++0x support -set(CMAKE_CXX_FLAGS "-std=c++11 -Wall") -set(CMAKE_CXX_FLAGS_RELEASE "-std=c++11 -O3 -fopenmp -pthread") +set(CMAKE_CXX_FLAGS "-std=c++14 -Wall") +set(CMAKE_CXX_FLAGS_RELEASE "-std=c++14 -O3 -fopenmp -pthread") find_package(OpenCV 3.0 QUIET) @@ -37,6 +38,7 @@ ${Pangolin_INCLUDE_DIRS} set(LIBS ${OpenCV_LIBS} +opencv_core ${EIGEN3_LIBS} ${Pangolin_LIBRARIES} ${PROJECT_SOURCE_DIR}/../../../lib/libECT_SLAM.so @@ -47,6 +49,15 @@ rosbuild_add_executable(Mono src/ros_mono.cpp ) +# Node for TUM dataset +rosbuild_add_executable(TUM +src/tum.cpp +) + target_link_libraries(Mono ${LIBS} ) + +target_link_libraries(TUM +${LIBS} +) \ No newline at end of file diff --git a/example/ROS/ECT_SLAM/src/ros_mono.cpp b/example/ROS/ECT_SLAM/src/ros_mono.cpp index 162859c..c4b4410 100644 --- a/example/ROS/ECT_SLAM/src/ros_mono.cpp +++ b/example/ROS/ECT_SLAM/src/ros_mono.cpp @@ -10,6 +10,7 @@ #include #include"system.hpp" +#include"config.hpp" using namespace std; @@ -28,9 +29,9 @@ int main(int argc, char **argv) ros::init(argc, argv, "Mono"); ros::start(); - if(argc != 2) + if(argc != 3) { - cerr << endl << "Usage: rosrun ECT_SLAM Mono path_to_settings" << endl; + cerr << endl << "Usage: rosrun ECT_SLAM Mono path_to_settings image_topic" << endl; ros::shutdown(); return 1; } @@ -39,11 +40,10 @@ int main(int argc, char **argv) ECT_SLAM::System SLAM(argv[1],ECT_SLAM::System::MONOCULAR); SLAM.Init(); - ImageGrabber igb(&SLAM); ros::NodeHandle nodeHandler; - ros::Subscriber sub = nodeHandler.subscribe("cam0/image_raw", 1, &ImageGrabber::GrabImage,&igb); + ros::Subscriber sub = nodeHandler.subscribe(argv[2], 1, &ImageGrabber::GrabImage,&igb); ros::spin(); diff --git a/include/algorithm.hpp b/include/algorithm.hpp index b8eba4f..452cc2d 100644 --- a/include/algorithm.hpp +++ b/include/algorithm.hpp @@ -1,36 +1,40 @@ +#pragma once #include "common_include.hpp" -namespace ECT_SLAM { +namespace ECT_SLAM +{ -/** + /** * linear triangulation with SVD * @param poses poses, * @param points points in normalized plane * @param pt_world triangulated point in the world * @return true if success */ -inline bool triangulation(const std::vector &poses, - const std::vector points, Vec3 &pt_world) { - MatXX A(2 * poses.size(), 4); - VecX b(2 * poses.size()); - b.setZero(); - for (size_t i = 0; i < poses.size(); ++i) { - Mat34 m = poses[i].matrix3x4(); - A.block<1, 4>(2 * i, 0) = points[i][0] * m.row(2) - m.row(0); - A.block<1, 4>(2 * i + 1, 0) = points[i][1] 
* m.row(2) - m.row(1); - } - auto svd = A.bdcSvd(Eigen::ComputeThinU | Eigen::ComputeThinV); - pt_world = (svd.matrixV().col(3) / svd.matrixV()(3, 3)).head<3>(); + inline bool triangulation(const std::vector &poses, + const std::vector points, Vec3 &pt_world) + { + MatXX A(2 * poses.size(), 4); + VecX b(2 * poses.size()); + b.setZero(); + for (size_t i = 0; i < poses.size(); ++i) + { + Mat34 m = poses[i].matrix3x4(); + A.block<1, 4>(2 * i, 0) = points[i][0] * m.row(2) - m.row(0); + A.block<1, 4>(2 * i + 1, 0) = points[i][1] * m.row(2) - m.row(1); + } + auto svd = A.bdcSvd(Eigen::ComputeThinU | Eigen::ComputeThinV); + pt_world = (svd.matrixV().col(3) / svd.matrixV()(3, 3)).head<3>(); - if (svd.singularValues()[3] / svd.singularValues()[2] < 1e-2) { - // 解质量不好,放弃 - return true; + if (svd.singularValues()[3] / svd.singularValues()[2] < 1e-2) + { + // 解质量不好,放弃 + return true; + } + return false; } - return false; -} - -// converters -inline Vec2 toVec2(const cv::Point2f p) { return Vec2(p.x, p.y); } -} // namespace ECT_SLAM + // converters + inline Vec2 toVec2(const cv::Point2f p) { return Vec2(p.x, p.y); } +} // namespace ECT_SLAM diff --git a/include/camera.hpp b/include/camera.hpp index 24fdd0e..7c23389 100644 --- a/include/camera.hpp +++ b/include/camera.hpp @@ -2,41 +2,59 @@ #include "common_include.hpp" -namespace ECT_SLAM { +namespace ECT_SLAM +{ -// Pinhole stereo camera model -class Camera { - public: - EIGEN_MAKE_ALIGNED_OPERATOR_NEW; - typedef std::shared_ptr Ptr; + // Pinhole stereo camera model + class Camera + { + public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW; + typedef std::shared_ptr Ptr; - double fx_ = 0, fy_ = 0, cx_ = 0, cy_ = 0; // Camera intrinsics + double fx_ = 0, fy_ = 0, cx_ = 0, cy_ = 0; // Camera intrinsics + double k1_ = 0, k2_ = 0, r1_ = 0, r2_ = 0; - Camera(); + Camera(); - Camera(double fx, double fy, double cx, double cy) - : fx_(fx), fy_(fy), cx_(cx), cy_(cy) { - } + Camera(double fx, double fy, double cx, double cy) + : fx_(fx), fy_(fy), cx_(cx), cy_(cy) + { + } - // return intrinsic matrix - Mat33 K() const { - Mat33 k; - k << fx_, 0, cx_, 0, fy_, cy_, 0, 0, 1; - return k; - } + Mat33f K() const + { + Mat33f k; + k << fx_, 0, cx_, 0, fy_, cy_, 0, 0, 1; + return k; + } - // coordinate transform: world, camera, pixel - Vec3 world2camera(const Vec3 &p_w, const SE3 &T_c_w); + // return intrinsic matrix + Mat33 K_d() const + { + Mat33 k; + k << fx_, 0, cx_, 0, fy_, cy_, 0, 0, 1; + return k; + } - Vec3 camera2world(const Vec3 &p_c, const SE3 &T_c_w); + // return intrinsic matrix + cv::Mat K_cv() const + { + return (cv::Mat1d(3, 3) << fx_, 0, cx_, 0, fy_, cy_, 0, 0, 1); + } - Vec2 camera2pixel(const Vec3 &p_c); + // coordinate transform: world, camera, pixel + Vec3 world2camera(const Vec3 &p_w, const SE3 &T_c_w); - Vec3 pixel2camera(const Vec2 &p_p, double depth = 1); + Vec3 camera2world(const Vec3 &p_c, const SE3 &T_c_w); - Vec3 pixel2world(const Vec2 &p_p, const SE3 &T_c_w, double depth = 1); + Vec2 camera2pixel(const Vec3 &p_c); - Vec2 world2pixel(const Vec3 &p_w, const SE3 &T_c_w); -}; + Vec3 pixel2camera(const Vec2 &p_p, double depth = 1); -} // namespace ECT_SLAM + Vec3 pixel2world(const Vec2 &p_p, const SE3 &T_c_w, double depth = 1); + + Vec2 world2pixel(const Vec3 &p_w, const SE3 &T_c_w); + }; + +} // namespace ECT_SLAM diff --git a/include/common_include.hpp b/include/common_include.hpp index 8bf356a..64b73ff 100644 --- a/include/common_include.hpp +++ b/include/common_include.hpp @@ -55,9 +55,11 @@ typedef Eigen::Matrix Mat1414; // float matricies typedef 
Eigen::Matrix Mat33f; +typedef Eigen::Matrix Mat34f; typedef Eigen::Matrix Mat103f; typedef Eigen::Matrix Mat22f; typedef Eigen::Matrix Vec3f; +typedef Eigen::Matrix Vec4f; typedef Eigen::Matrix Vec2f; typedef Eigen::Matrix Vec6f; typedef Eigen::Matrix Mat18f; @@ -110,10 +112,11 @@ typedef Sophus::SO3d SO3; // for cv #include +#include using cv::Mat; -// // glog -// #include +// glog +#include #endif // ECT_SLAM_COMMON_INCLUDE_H \ No newline at end of file diff --git a/include/feature.hpp b/include/feature.hpp index 0a38a43..e8575e8 100644 --- a/include/feature.hpp +++ b/include/feature.hpp @@ -3,36 +3,43 @@ #include #include #include "common_include.hpp" -#include "orb.hpp" -namespace ECT_SLAM { +namespace ECT_SLAM +{ -struct Frame; -struct MapPoint; + struct Frame; + struct MapPoint; + + enum STATUS{ + NONMATCH=0, + MATCH3D=1, + MATCH2D=2 + }; -/** + /** * 2D 特征点 * 在三角化之后会被关联一个地图点 */ -struct Feature { - public: - EIGEN_MAKE_ALIGNED_OPERATOR_NEW; - typedef std::shared_ptr Ptr; + struct Feature + { + public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW; + typedef std::shared_ptr Ptr; - std::weak_ptr frame_; // 持有该feature的frame - cv::KeyPoint position_; // 2D提取位置 - std::weak_ptr map_point_; // 关联地图点 - DescType descriptor_; // 描述子 + std::weak_ptr frame_; // 持有该feature的frame + cv::KeyPoint position_; // 2D提取位置 + std::weak_ptr map_point_; // 关联地图点 - bool is_outlier_ = false; // 是否为异常点 + bool is_outlier_ = false; // 是否为异常点 + STATUS status_ = STATUS::NONMATCH; - //! TODO: timestamp + //! TODO: timestamp - public: - Feature() {} + public: + Feature() {} - Feature(std::shared_ptr frame, const cv::KeyPoint &kp, const DescType &desc) - : frame_(frame), position_(kp), descriptor_(desc) {} -}; -} // namespace ECT_SLAM + Feature(std::shared_ptr frame, const cv::KeyPoint &kp) + : frame_(frame), position_(kp) {} + }; +} // namespace ECT_SLAM diff --git a/include/frame.hpp b/include/frame.hpp index 059308b..4a911b8 100644 --- a/include/frame.hpp +++ b/include/frame.hpp @@ -28,6 +28,9 @@ struct Frame { // extracted features in image std::vector> features_; + // std::vector descriptors_; + cv::Mat descriptors_; + public: // data members Frame() {} @@ -40,6 +43,10 @@ struct Frame { return pose_; } + Mat34f RT(){ + return Pose().matrix().block<3, 4>(0, 0).cast (); + } + void SetPose(const SE3 &pose) { std::unique_lock lck(pose_mutex_); pose_ = pose; diff --git a/include/frontend.hpp b/include/frontend.hpp index 2e8b049..f9da26b 100644 --- a/include/frontend.hpp +++ b/include/frontend.hpp @@ -6,90 +6,130 @@ #include "frame.hpp" #include "map.hpp" -namespace ECT_SLAM { +namespace ECT_SLAM +{ -class Backend; -class Viewer; + class Backend; + class Viewer; -enum class FrontendStatus { INITING, TRACKING_GOOD, TRACKING_BAD, LOST }; + enum class FrontendStatus + { + INITING, + TRACKING_GOOD, + TRACKING_BAD, + LOST + }; -/** + /** * 前端 * 估计当前帧Pose,在满足关键帧条件时向地图加入关键帧并触发优化 */ -class Frontend { + class Frontend + { public: - EIGEN_MAKE_ALIGNED_OPERATOR_NEW; - typedef std::shared_ptr Ptr; + EIGEN_MAKE_ALIGNED_OPERATOR_NEW; + typedef std::shared_ptr Ptr; - Frontend(); + Frontend(); - /// 外部接口,添加一个帧并计算其定位结果 - bool AddFrame(Frame::Ptr frame); + /// 外部接口,添加一个帧并计算其定位结果 + bool AddFrame(Frame::Ptr frame); - /// Set函数 - void SetMap(Map::Ptr map) { map_ = map; } + /// Set函数 + void SetMap(Map::Ptr map) { map_ = map; } - void SetBackend(std::shared_ptr backend) { backend_ = backend; } + void SetBackend(std::shared_ptr backend) { backend_ = backend; } - void SetViewer(std::shared_ptr viewer) { viewer_ = viewer; } + void 
SetViewer(std::shared_ptr viewer) { viewer_ = viewer; } - FrontendStatus GetStatus() const { return status_; } + FrontendStatus GetStatus() const { return status_; } - void SetCameras(Camera::Ptr cam) { - camera_ = cam; - } + void SetCameras(Camera::Ptr cam) + { + camera_ = cam; + } private: - /** + /** * Initialize * @return true if success */ - bool Init(); + bool Init(); - /** + /** * Track in normal mode * @return true if success */ - bool Track(); + bool Track(); - /** + /** * Reset when lost * @return true if success */ - bool Reset(); + bool Reset(); - /** - * Detect Feature & Calculate ORB Descriptors - * @return true if success - */ - bool DetectFeature(); + bool TrackLastFrame(std::vector &points_3d, std::vector &points_2d, + std::vector &last_to_be_tri, std::vector ¤t_to_be_tri, + std::vector &matches); + + bool MatchLastFrame(std::vector &points_3d, std::vector &points_2d, + std::vector &last_to_be_tri, std::vector ¤t_to_be_tri, + std::vector &matches); + + bool InsertKeyFrame(std::vector &last_to_be_tri, std::vector ¤t_to_be_tri, + std::vector &matches); + + bool EstimatePnP(std::vector &points_3d, std::vector &points_2d); + + bool DetectFeature(); + + void RejectWithF(Frame::Ptr frame1, Frame::Ptr frame2, std::vector &matches,std::vector& status); + + bool DetectAndTriNewFeature(); + + bool MatchAndBuildMap(Frame::Ptr frame1, Frame::Ptr frame2); + + bool Match2D2D(Frame::Ptr &frame1, Frame::Ptr frame2, std::vector &matches, + std::vector &points1, std::vector &points2, int thres); + + bool Trangulation(Frame::Ptr &frame1, Frame::Ptr frame2, + std::vector &matches, + std::vector &points1, std::vector &points2); + + bool EstimateWithMatches(Frame::Ptr &frame1, Frame::Ptr frame2, + std::vector &points1, std::vector &points2); + + // data + FrontendStatus status_ = FrontendStatus::INITING; - // data - FrontendStatus status_ = FrontendStatus::INITING; + Frame::Ptr current_frame_ = nullptr; // 当前帧 + Frame::Ptr last_frame_ = nullptr; // 上一帧 + Frame::Ptr first_frame_ = nullptr; // 第0帧 - Frame::Ptr current_frame_ = nullptr; // 当前帧 - Frame::Ptr last_frame_ = nullptr; // 上一帧 - Camera::Ptr camera_ = nullptr; // 左侧相机 + Camera::Ptr camera_ = nullptr; // 左侧相机 - Map::Ptr map_ = nullptr; - std::shared_ptr backend_ = nullptr; - std::shared_ptr viewer_ = nullptr; + Map::Ptr map_ = nullptr; + std::shared_ptr backend_ = nullptr; + std::shared_ptr viewer_ = nullptr; - SE3 relative_motion_; // 当前帧与上一帧的相对运动,用于估计当前帧pose初值 + SE3 relative_motion_; // 当前帧与上一帧的相对运动,用于估计当前帧pose初值 - int tracking_inliers_ = 0; // inliers, used for testing new keyframes + int tracking_inliers_ = 0; // inliers, used for testing new keyframes - // params - int num_features_ = 200; - int num_features_init_ = 100; - int num_features_tracking_ = 50; - int num_features_tracking_bad_ = 20; - int num_features_needed_for_keyframe_ = 80; + // params + int num_features_ = 200; + int num_features_init_ = 100; + int num_features_tracking_ = 50; + int num_features_tracking_bad_ = 20; + int num_for_keyframe_ = 100; + double ratio_for_keyframe_ = 0.35; - // utilities - cv::Ptr orb_; // feature detector in opencv -}; + // utilities + cv::Ptr gftt_; -} // namespace ECT_SLAM + cv::Ptr detector_; + cv::Ptr descriptor_; + cv::Ptr matcher_; + }; +} // namespace ECT_SLAM diff --git a/include/g2o_types.hpp b/include/g2o_types.hpp new file mode 100644 index 0000000..c01cc75 --- /dev/null +++ b/include/g2o_types.hpp @@ -0,0 +1,150 @@ +#pragma once +#include "common_include.hpp" + +#include +#include +#include +#include +#include +#include +#include 
+#include +#include +#include +#include +#include + +namespace ECT_SLAM { +/// vertex and edges used in g2o ba +/// 位姿顶点 +class VertexPose : public g2o::BaseVertex<6, SE3> { + public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW; + + virtual void setToOriginImpl() override { _estimate = SE3(); } + + /// left multiplication on SE3 + virtual void oplusImpl(const double *update) override { + Vec6 update_eigen; + update_eigen << update[0], update[1], update[2], update[3], update[4], + update[5]; + _estimate = SE3::exp(update_eigen) * _estimate; + } + + virtual bool read(std::istream &in) override { return true; } + + virtual bool write(std::ostream &out) const override { return true; } +}; + +/// 路标顶点 +class VertexXYZ : public g2o::BaseVertex<3, Vec3> { + public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW; + virtual void setToOriginImpl() override { _estimate = Vec3::Zero(); } + + virtual void oplusImpl(const double *update) override { + _estimate[0] += update[0]; + _estimate[1] += update[1]; + _estimate[2] += update[2]; + } + + virtual bool read(std::istream &in) override { return true; } + + virtual bool write(std::ostream &out) const override { return true; } +}; + +/// 仅估计位姿的一元边 +class EdgeProjectionPoseOnly : public g2o::BaseUnaryEdge<2, Vec2, VertexPose> { + public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW; + + EdgeProjectionPoseOnly(const Vec3 &pos, const Mat33 &K) + : _pos3d(pos), _K(K) {} + + virtual void computeError() override { + const VertexPose *v = static_cast(_vertices[0]); + SE3 T = v->estimate(); + Vec3 pos_pixel = _K * (T * _pos3d); + pos_pixel /= pos_pixel[2]; + _error = _measurement - pos_pixel.head<2>(); + } + + virtual void linearizeOplus() override { + const VertexPose *v = static_cast(_vertices[0]); + SE3 T = v->estimate(); + Vec3 pos_cam = T * _pos3d; + double fx = _K(0, 0); + double fy = _K(1, 1); + double X = pos_cam[0]; + double Y = pos_cam[1]; + double Z = pos_cam[2]; + double Zinv = 1.0 / (Z + 1e-18); + double Zinv2 = Zinv * Zinv; + _jacobianOplusXi << -fx * Zinv, 0, fx * X * Zinv2, fx * X * Y * Zinv2, + -fx - fx * X * X * Zinv2, fx * Y * Zinv, 0, -fy * Zinv, + fy * Y * Zinv2, fy + fy * Y * Y * Zinv2, -fy * X * Y * Zinv2, + -fy * X * Zinv; + } + + virtual bool read(std::istream &in) override { return true; } + + virtual bool write(std::ostream &out) const override { return true; } + + private: + Vec3 _pos3d; + Mat33 _K; +}; + +/// 带有地图和位姿的二元边 +class EdgeProjection + : public g2o::BaseBinaryEdge<2, Vec2, VertexPose, VertexXYZ> { + public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW; + + /// 构造时传入相机内外参 + EdgeProjection(const Mat33 &K, const SE3 &cam_ext) : _K(K) { + _cam_ext = cam_ext; + } + + virtual void computeError() override { + const VertexPose *v0 = static_cast(_vertices[0]); + const VertexXYZ *v1 = static_cast(_vertices[1]); + SE3 T = v0->estimate(); + Vec3 pos_pixel = _K * (_cam_ext * (T * v1->estimate())); + pos_pixel /= pos_pixel[2]; + _error = _measurement - pos_pixel.head<2>(); + } + + virtual void linearizeOplus() override { + const VertexPose *v0 = static_cast(_vertices[0]); + const VertexXYZ *v1 = static_cast(_vertices[1]); + SE3 T = v0->estimate(); + Vec3 pw = v1->estimate(); + Vec3 pos_cam = _cam_ext * T * pw; + double fx = _K(0, 0); + double fy = _K(1, 1); + double X = pos_cam[0]; + double Y = pos_cam[1]; + double Z = pos_cam[2]; + double Zinv = 1.0 / (Z + 1e-18); + double Zinv2 = Zinv * Zinv; + _jacobianOplusXi << -fx * Zinv, 0, fx * X * Zinv2, fx * X * Y * Zinv2, + -fx - fx * X * X * Zinv2, fx * Y * Zinv, 0, -fy * Zinv, + fy * Y * Zinv2, fy + fy * Y * Y * Zinv2, -fy * X * Y * 
Zinv2, + -fy * X * Zinv; + + _jacobianOplusXj = _jacobianOplusXi.block<2, 3>(0, 0) * + _cam_ext.rotationMatrix() * T.rotationMatrix(); + } + + virtual bool read(std::istream &in) override { return true; } + + virtual bool write(std::ostream &out) const override { return true; } + + private: + Mat33 _K; + SE3 _cam_ext; +}; + +} // namespace ECT_SLAM + diff --git a/include/map.hpp b/include/map.hpp index 98034f3..c7b8a40 100644 --- a/include/map.hpp +++ b/include/map.hpp @@ -15,7 +15,7 @@ class Map { EIGEN_MAKE_ALIGNED_OPERATOR_NEW; typedef std::shared_ptr Ptr; typedef std::unordered_map LandmarksType; - typedef std::unordered_map KeyframesType; + typedef std::map KeyframesType; Map() {} @@ -63,7 +63,7 @@ class Map { Frame::Ptr current_frame_ = nullptr; // settings - int num_active_keyframes_ = 7; // 激活的关键帧数量 + int num_active_keyframes_ = 10; // 激活的关键帧数量 }; } // namespace ECT_SLAM diff --git a/include/orb.hpp b/include/orb.hpp deleted file mode 100644 index 25bfb5a..0000000 --- a/include/orb.hpp +++ /dev/null @@ -1,27 +0,0 @@ -#include -#include -#include - -using namespace std; - -// 32 bit unsigned int, will have 8, 8x32=256 -typedef vector DescType; // Descriptor type - -/** - * compute descriptor of orb keypoints - * @param img input image - * @param keypoints detected fast keypoints - * @param descriptors descriptors - * - * NOTE: if a keypoint goes outside the image boundary (8 pixels), descriptors will not be computed and will be left as - * empty - */ -void ComputeORB(const cv::Mat &img, vector &keypoints, vector &descriptors); - -/** - * brute-force match two sets of descriptors - * @param desc1 the first descriptor - * @param desc2 the second descriptor - * @param matches matches of two images - */ -void BfMatch(const vector &desc1, const vector &desc2, vector &matches); diff --git a/include/system.hpp b/include/system.hpp index 2524269..987746f 100644 --- a/include/system.hpp +++ b/include/system.hpp @@ -3,6 +3,7 @@ #include #include #include +#include #include #include @@ -36,6 +37,8 @@ class System // Returns the camera pose (empty if tracking fails). cv::Mat TrackMonocular(const cv::Mat &im, const double &timestamp); + void ProcessImage(std::string & image_path, const double & timestamp); + bool Init(); // All threads will be requested to finish. @@ -43,7 +46,7 @@ class System // This function must be called before saving the trajectory. void Shutdown(); - // Save keyframe poses in the TUM RGB-D dataset format. + // Save keyframe poses in the TUM dataset format. // This method works for all sensor input.
// Call first Shutdown() // See format details at: http://vision.in.tum.de/data/datasets/rgbd-dataset diff --git a/include/viewer.hpp b/include/viewer.hpp index 79b2bb9..61f8110 100644 --- a/include/viewer.hpp +++ b/include/viewer.hpp @@ -25,14 +25,35 @@ class Viewer { void Close(); + // 增加一个当前帧 + void AddCurrentFrame(Frame::Ptr current_frame); + + // 更新地图 + void UpdateMap(); + private: void ThreadLoop(); + void DrawFrame(Frame::Ptr frame, const float* color); + + void DrawMapPoints(); + + void FollowCurrentFrame(pangolin::OpenGlRenderState& vis_camera); + + /// plot the features in current frame into an image + cv::Mat PlotFrameImage(); + + Frame::Ptr current_frame_ = nullptr; Map::Ptr map_ = nullptr; std::thread viewer_thread_; bool viewer_running_ = true; + + Map::KeyframesType active_keyframes_; + Map::LandmarksType active_landmarks_; + bool map_updated_ = false; + std::mutex viewer_data_mutex_; }; } // namespace ECT_SLAM diff --git a/src/backend.cpp b/src/backend.cpp index 8b8e935..9c1cda9 100644 --- a/src/backend.cpp +++ b/src/backend.cpp @@ -3,40 +3,192 @@ #include "feature.hpp" #include "map.hpp" #include "mappoint.hpp" +#include "g2o_types.hpp" +#include "config.hpp" -namespace ECT_SLAM { +namespace ECT_SLAM +{ -Backend::Backend() { - backend_running_.store(true); - backend_thread_ = std::thread(std::bind(&Backend::BackendLoop, this)); -} + Backend::Backend() + { + backend_running_.store(true); + backend_thread_ = std::thread(std::bind(&Backend::BackendLoop, this)); + } -void Backend::UpdateMap() { - std::unique_lock lock(data_mutex_); - map_update_.notify_one(); -} + void Backend::UpdateMap() + { + std::unique_lock lock(data_mutex_); + map_update_.notify_one(); + } -void Backend::Stop() { - backend_running_.store(false); - map_update_.notify_one(); - backend_thread_.join(); -} + void Backend::Stop() + { + backend_running_.store(false); + map_update_.notify_one(); + backend_thread_.join(); + } -void Backend::BackendLoop() { - while (backend_running_.load()) { - std::unique_lock lock(data_mutex_); - map_update_.wait(lock); + void Backend::BackendLoop() + { + while (backend_running_.load()) + { + std::unique_lock lock(data_mutex_); + map_update_.wait(lock); - /// 后端仅优化激活的Frames和Landmarks - Map::KeyframesType active_kfs = map_->GetActiveKeyFrames(); - Map::LandmarksType active_landmarks = map_->GetActiveMapPoints(); - Optimize(active_kfs, active_landmarks); + /// 后端仅优化激活的Frames和Landmarks + Map::KeyframesType active_kfs = map_->GetAllKeyFrames(); + Map::LandmarksType landmarks = map_->GetAllMapPoints(); + Optimize(active_kfs, landmarks); + } } -} -void Backend::Optimize(Map::KeyframesType &keyframes, - Map::LandmarksType &landmarks) { - -} + void Backend::Optimize(Map::KeyframesType &keyframes_, Map::LandmarksType &landmarks) + { + auto it = keyframes_.begin(); + if (keyframes_.size() > 20) + std::advance(it, keyframes_.size() - 20); + Map::KeyframesType keyframes = Map::KeyframesType(it, keyframes_.end()); + std::cout << "#BackEnd# Optimizing [" << keyframes.begin()->first << "," + << keyframes.begin()->first + keyframes.size() << "]" << std::endl; + + //-----------------setup g2o----------------- + typedef g2o::BlockSolver_6_3 BlockSolverType; + typedef g2o::LinearSolverCSparse + LinearSolverType; + auto solver = new g2o::OptimizationAlgorithmLevenberg( + g2o::make_unique( + g2o::make_unique())); + g2o::SparseOptimizer optimizer; + optimizer.setAlgorithm(solver); + + //-----------------paras for vertex & edge----------------- + // pose 顶点,使用Keyframe id + std::map vertices; + // 
路标顶点,使用路标id索引 + std::map vertices_landmarks; + // edges + int index = 1; + double chi2_th = Config::Get("chi2_th"); //5.991; // robust kernel 阈值 + std::map edges_and_features; + // K 和左右外参 + Mat33 K = camera_->K_d(); + SE3 ext = SE3(); + unsigned long max_kf_id = 20; + int count = 0; + + //-----------------setup vertex & edge----------------- + for (auto &keyframe : keyframes) + { + auto kf = keyframe.second; + VertexPose *vertex_pose = new VertexPose(); // camera vertex_pose + vertex_pose->setId(count); + vertex_pose->setEstimate(kf->Pose()); + if (count++ < 2) + vertex_pose->setFixed(true); + optimizer.addVertex(vertex_pose); + vertices.insert({kf->keyframe_id_, vertex_pose}); + + for (auto &feat : kf->features_) + { + if (feat->is_outlier_ || feat->map_point_.lock() == nullptr) + continue; + auto landmark = feat->map_point_.lock(); + unsigned long landmark_id = landmark->id_; + + // 如果landmark还没有被加入优化,则新加一个顶点 + if (vertices_landmarks.find(landmark_id) == + vertices_landmarks.end()) + { + VertexXYZ *v = new VertexXYZ; + v->setEstimate(landmark->Pos()); + v->setId(landmark_id + max_kf_id + 1); + v->setMarginalized(true); + vertices_landmarks.insert({landmark_id, v}); + optimizer.addVertex(v); + } + + EdgeProjection *edge = nullptr; + edge = new EdgeProjection(K, ext); + + edge->setId(index); + edge->setVertex(0, vertices.at(kf->keyframe_id_)); // pose + edge->setVertex(1, vertices_landmarks.at(landmark_id)); // landmark + edge->setMeasurement(toVec2(feat->position_.pt)); + edge->setInformation(Mat22::Identity()); + auto rk = new g2o::RobustKernelHuber(); + rk->setDelta(chi2_th); + edge->setRobustKernel(rk); + edges_and_features.insert({edge, feat}); + + optimizer.addEdge(edge); + + index++; + } + } + + //-----------------do optimization and eliminate the outliers----------------- + optimizer.initializeOptimization(); + optimizer.optimize(Config::Get("num_iteration")); + + int cnt_outlier = 0, cnt_inlier = 0; + int iteration = 0; + while (iteration < 8) + { + cnt_outlier = 0; + cnt_inlier = 0; + // determine if we want to adjust the outlier threshold + for (auto &ef : edges_and_features) + { + if (ef.first->chi2() > chi2_th) + { + cnt_outlier++; + } + else + { + cnt_inlier++; + } + } + double inlier_ratio = cnt_inlier / double(cnt_inlier + cnt_outlier); + if (inlier_ratio > 0.5) + { + break; + } + else + { + chi2_th *= 2; + iteration++; + } + } + + for (auto &ef : edges_and_features) + { + if (ef.first->chi2() > chi2_th) + { + ef.second->is_outlier_ = true; + // remove the observation + if (auto lock = ef.second->map_point_.lock()) + lock->RemoveObservation(ef.second); + } + else + { + ef.second->is_outlier_ = false; + } + } + + LOG(INFO) << "Outlier/Inlier in optimization: " << cnt_outlier << "/" << cnt_inlier; + + //-----------------Set pose and lanrmark position----------------- + auto iter = keyframes.begin(); + for (auto &v : vertices) + { + (iter->second)->SetPose(v.second->estimate()); + iter++; + } + + for (auto &v : vertices_landmarks) + { + landmarks.at(v.first)->SetPos(v.second->estimate()); + } + } -} // namespace ECT_SLAM \ No newline at end of file +} // namespace ECT_SLAM \ No newline at end of file diff --git a/src/frontend.cpp b/src/frontend.cpp index 547e19f..99fab80 100644 --- a/src/frontend.cpp +++ b/src/frontend.cpp @@ -7,21 +7,31 @@ #include "frontend.hpp" #include "map.hpp" #include "viewer.hpp" -#include "orb.hpp" namespace ECT_SLAM { - Frontend::Frontend() { - orb_ = cv::ORB::create(); + gftt_ = + cv::GFTTDetector::create(Config::Get("num_features"), 0.01, 20); + + 
detector_ = cv::ORB::create(); + descriptor_ = cv::ORB::create(); + matcher_ = cv::DescriptorMatcher::create("BruteForce-Hamming"); + num_features_init_ = Config::Get("num_features_init"); num_features_ = Config::Get("num_features"); + num_for_keyframe_ = Config::Get("num_for_keyframe"); + ratio_for_keyframe_ = Config::Get("ratio_for_keyframe"); } bool Frontend::AddFrame(ECT_SLAM::Frame::Ptr frame) { + last_frame_ = current_frame_; current_frame_ = frame; + if (current_frame_->id_ == 0) + first_frame_ = frame; + switch (status_) { case FrontendStatus::INITING: @@ -36,32 +46,363 @@ namespace ECT_SLAM break; } - last_frame_ = current_frame_; return true; } bool Frontend::Init() { std::cout << "Initing...\n"; + DetectFeature(); - status_ = FrontendStatus::TRACKING_GOOD; + if (current_frame_->id_ >= 10) + { + if (MatchAndBuildMap(first_frame_, current_frame_)) + { + status_ = FrontendStatus::TRACKING_GOOD; + if (viewer_) + { + viewer_->AddCurrentFrame(current_frame_); + viewer_->UpdateMap(); + } + } + } + + return true; + } + + void Frontend::RejectWithF(Frame::Ptr frame1, Frame::Ptr frame2, std::vector &matches, std::vector &status) + { + if (matches.size() >= 8) + { + std::vector points1, points2; + for (auto m : matches) + { + auto last_feature = frame1->features_[m.queryIdx]; + auto current_feature = frame2->features_[m.trainIdx]; + points1.emplace_back(last_feature->position_.pt.x, last_feature->position_.pt.y); + points2.emplace_back(current_feature->position_.pt.x, current_feature->position_.pt.y); + } + + cv::findFundamentalMat(points1, points2, cv::FM_RANSAC, 1.0, 0.99, status); + } + } + + bool Frontend::MatchLastFrame(std::vector &points_3d, std::vector &points_2d, + std::vector &last_to_be_tri, std::vector &current_to_be_tri, + std::vector &matches) + { + std::vector bf_matches; + matcher_->match(last_frame_->descriptors_, current_frame_->descriptors_, bf_matches); + cv::Mat mask(last_frame_->img_.size(), CV_8UC1, 255); + int num_good_pts = 0; + + std::vector status; + RejectWithF(last_frame_, current_frame_, bf_matches, status); + + auto min_max = minmax_element(bf_matches.begin(), bf_matches.end(), + [](const cv::DMatch &m1, const cv::DMatch &m2) + { return m1.distance < m2.distance; }); + double min_dist = min_max.first->distance; + double max_dist = min_max.second->distance; + + int i = 0; + //! Match with 3D & Get MASK + for (auto m : bf_matches) + { + if (!status[i++]) + continue; + auto last_feature = last_frame_->features_[m.queryIdx]; + auto current_feature = current_frame_->features_[m.trainIdx]; + + if (abs(last_feature->position_.pt.x - current_feature->position_.pt.x) > 40 || abs(last_feature->position_.pt.y - current_feature->position_.pt.y) > 40) + continue; + + if (m.distance > std::max(2 * min_dist, 60.0)) + continue; + current_feature->map_point_ = last_feature->map_point_; + if (auto mp = last_feature->map_point_.lock()) + { + num_good_pts++; + auto pt3d = mp->Pos(); + points_3d.emplace_back(pt3d[0], pt3d[1], pt3d[2]); + points_2d.emplace_back(current_feature->position_.pt.x, current_feature->position_.pt.y); + + // Add Obs + mp->AddObservation(current_feature); + current_feature->status_ = STATUS::MATCH3D; + + cv::rectangle(mask, last_feature->position_.pt - cv::Point2f(3, 3), + last_feature->position_.pt + cv::Point2f(3, 3), 0, CV_FILLED); + } + } + + i = 0; + //!
Get 2D-Matches not within mask + for (auto m : bf_matches) + { + if (!status[i++]) + continue; + auto last_feature = last_frame_->features_[m.queryIdx]; + auto current_feature = current_frame_->features_[m.trainIdx]; + + if (abs(last_feature->position_.pt.x - current_feature->position_.pt.x) > 40 || abs(last_feature->position_.pt.y - current_feature->position_.pt.y) > 40) + continue; + + if (m.distance > std::max(2 * min_dist, 60.0)) + continue; + + if (!mask.at(last_feature->position_.pt)) + continue; + + if (last_feature->map_point_.expired()) + { + last_to_be_tri.emplace_back(last_feature->position_.pt.x, last_feature->position_.pt.y); + current_to_be_tri.emplace_back(current_feature->position_.pt.x, current_feature->position_.pt.y); + matches.push_back(m); + + current_feature->status_ = STATUS::MATCH2D; + } + } + + double ratio = (double)num_good_pts / (double)(num_good_pts + matches.size()); + LOG(INFO) << "ratio: " << num_good_pts << "/" << (num_good_pts + matches.size()) << " = " << ratio << ". "; + + if (ratio < ratio_for_keyframe_) + return true; + if (num_good_pts < num_for_keyframe_) + return true; + return false; + } + + bool Frontend::TrackLastFrame(std::vector &points_3d, std::vector &points_2d, + std::vector &last_to_be_tri, std::vector ¤t_to_be_tri, + std::vector &matches) + + { + // use LK flow to estimate points in the right image + std::vector kps_last, kps_current; + for (auto &kp : last_frame_->features_) + { + // if (kp->map_point_.lock()) + // { + // // use project point + // auto mp = kp->map_point_.lock(); + // auto px = + // camera_->world2pixel(mp->pos_, current_frame_->Pose()); + // kps_last.push_back(kp->position_.pt); + // kps_current.push_back(cv::Point2f(px[0], px[1])); + // } + // else + { + kps_last.push_back(kp->position_.pt); + kps_current.push_back(kp->position_.pt); + } + } + + std::vector status; + cv::Mat error; + cv::calcOpticalFlowPyrLK( + last_frame_->img_, current_frame_->img_, kps_last, + kps_current, status, error, cv::Size(11, 11), 1, + cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, + 0.01), + cv::OPTFLOW_USE_INITIAL_FLOW); + + int num_good_pts = 0; + int k = 0; // count feature in current_frame_ + for (size_t i = 0; i < status.size(); ++i) + { + if (status[i]) + { + cv::KeyPoint kp(kps_current[i], 7); + Feature::Ptr feature(new Feature(current_frame_, kp)); + feature->map_point_ = last_frame_->features_[i]->map_point_; + current_frame_->features_.push_back(feature); + + if (auto mp = feature->map_point_.lock()) + { + num_good_pts++; + auto pt3d = mp->Pos(); + points_3d.emplace_back(pt3d[0], pt3d[1], pt3d[2]); + points_2d.push_back(kps_current[i]); + + // Add Obs + mp->AddObservation(feature); + feature->status_ = STATUS::MATCH3D; + } + else + { + last_to_be_tri.push_back(kps_last[i]); + current_to_be_tri.push_back(kps_current[i]); + matches.push_back(cv::DMatch{i, k, 0}); + + feature->status_ = STATUS::MATCH2D; + } + k++; + } + } + + double ratio = (double)num_good_pts / (double)(kps_last.size()); + LOG(INFO) << "ratio: " << num_good_pts << "/" << kps_last.size() << " = " << ratio << ". 
"; + + if (ratio < ratio_for_keyframe_) + return true; + if (num_good_pts < num_for_keyframe_) + return true; + return false; + } + + bool Frontend::EstimatePnP(std::vector &points_3d, std::vector &points_2d) + { + if (points_3d.size() < 5) + return false; + auto pose = current_frame_->Pose(); + auto se3 = pose.log(); + std::vector rvec{se3(3, 0), se3(4, 0), se3(5, 0)}; + std::vector tvec{se3(0, 0), se3(1, 0), se3(2, 0)}; + + cv::solvePnP(points_3d, points_2d, camera_->K_cv(), cv::Mat(), rvec, tvec, true, cv::SOLVEPNP_ITERATIVE); + Vec6 new_se3; + new_se3 << tvec[0], tvec[1], tvec[2], rvec[0], rvec[1], rvec[2]; + current_frame_->SetPose(SE3::exp(new_se3)); + + std::cout << "=POSE= " << rvec[0] << " " << rvec[1] << " " << rvec[2] << " " + << tvec[0] << " " << tvec[1] << " " << tvec[2] << std::endl; return true; } bool Frontend::Track() { - std::cout << "Tracking No." << current_frame_->id_ << " ...\n"; + std::cout << "Tracking No." << current_frame_->id_ << " ... "; + + // initial guess + // current_frame_->SetPose(last_frame_->Pose()); + current_frame_->SetPose(relative_motion_ * last_frame_->Pose()); + + //!--------------PnP Estimate With 2D-3D Matches(map)-------------------------- + std::vector matches; + std::vector points_2d; + std::vector points_3d; + std::vector last_to_be_tri; + std::vector current_to_be_tri; + + DetectFeature(); + + bool is_keyframe; + is_keyframe = MatchLastFrame(points_3d, points_2d, last_to_be_tri, current_to_be_tri, matches); + + EstimatePnP(points_3d, points_2d); - if (last_frame_) + // //!--------------Add New MapPoints With 2D-2D Matches(last frame)-------------- + if (is_keyframe) { - current_frame_->SetPose(relative_motion_ * last_frame_->Pose()); + InsertKeyFrame(last_to_be_tri, current_to_be_tri, matches); } + // end stage status_ = FrontendStatus::TRACKING_GOOD; - relative_motion_ = current_frame_->Pose() * last_frame_->Pose().inverse(); + if (viewer_) + viewer_->AddCurrentFrame(current_frame_); + return true; + } + + bool Frontend::DetectAndTriNewFeature() + { + std::vector points1, points2; + cv::Mat mask(last_frame_->img_.size(), CV_8UC1, 255); + for (auto &feat : last_frame_->features_) + { + cv::rectangle(mask, feat->position_.pt - cv::Point2f(10, 10), + feat->position_.pt + cv::Point2f(10, 10), 0, CV_FILLED); + } + std::vector keypoints, final_kps; + gftt_->detect(last_frame_->img_, keypoints, mask); + + std::vector kps_last, kps_current; + + for (auto &kp : keypoints) + { + kps_last.push_back(kp.pt); + kps_current.push_back(kp.pt); + final_kps.push_back(kp); + } + + std::vector status; + cv::Mat error; + cv::calcOpticalFlowPyrLK( + last_frame_->img_, current_frame_->img_, kps_last, + kps_current, status, error, cv::Size(11, 11), 1, + cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, + 0.01), + cv::OPTFLOW_USE_INITIAL_FLOW); + + for (size_t i = 0; i < status.size(); ++i) + { + if (status[i]) + { + points1.push_back(kps_last[i]); + points2.push_back(kps_current[i]); + } + } + + if (!points2.size()) + return false; + + cv::Mat P1, P2; + Mat34f P1_e = camera_->K() * last_frame_->RT(); + Mat34f P2_e = camera_->K() * current_frame_->RT(); + cv::eigen2cv(P1_e, P1); + cv::eigen2cv(P2_e, P2); + + cv::Mat pointsH(1, points2.size(), CV_32FC4); + cv::Mat points3F; + + cv::triangulatePoints(P1, P2, points1, points2, pointsH); + cv::convertPointsFromHomogeneous(pointsH.t(), points3F); + + SE3 pose_Tcw = last_frame_->Pose().inverse(); + for (int i = 0; i < points2.size(); i++) + { + if (points3F.at(i, 0)[2] <= 0) + continue; + + Vec3 pworld 
= Vec3(points3F.at(i, 0)[0], points3F.at(i, 0)[1], points3F.at(i, 0)[2]); + + Feature::Ptr current_feature(new Feature(current_frame_, final_kps[i])); + current_frame_->features_.push_back(current_feature); + + // auto new_map_point = MapPoint::CreateNewMappoint(); + // pworld = pose_Tcw * pworld; + // new_map_point->SetPos(pworld); + // new_map_point->AddObservation(current_feature); + + // current_feature->map_point_ = new_map_point; + current_feature->status_ = STATUS::MATCH2D; + + // map_->InsertMapPoint(new_map_point); + } + std::cout << "DetectAndAdd " << points2.size() << " new features\n"; + + return true; + } + + bool Frontend::InsertKeyFrame(std::vector &last_to_be_tri, std::vector ¤t_to_be_tri, + std::vector &matches) + { + //!---------------------Triangulate------------------- + Trangulation(last_frame_, current_frame_, matches, last_to_be_tri, current_to_be_tri); + + //!-------------------End Stage------------------------ + current_frame_->SetKeyFrame(); + map_->InsertKeyFrame(current_frame_); + backend_->UpdateMap(); + if (viewer_) + { + viewer_->UpdateMap(); + } return true; } @@ -71,19 +412,144 @@ namespace ECT_SLAM return true; } - bool Frontend::DetectFeature(){ + bool Frontend::DetectFeature() + { std::vector keypoints; - std::vector descriptors; cv::FAST(current_frame_->img_, keypoints, 40); - ComputeORB(current_frame_->img_, keypoints, descriptors); + descriptor_->compute(current_frame_->img_, keypoints, current_frame_->descriptors_); - for(int i = 0; i < keypoints.size(); i++){ - if(!descriptors[i].size()) continue; - Feature::Ptr feature(new Feature(current_frame_, keypoints[i], descriptors[i])); + for (int i = 0; i < keypoints.size(); i++) + { + Feature::Ptr feature(new Feature(current_frame_, keypoints[i])); current_frame_->features_.push_back(feature); } - // std::cout << current_frame_->features_.size() << " / " << keypoints.size()<< std::endl; + return true; + } + + bool Frontend::Match2D2D(Frame::Ptr &frame1, Frame::Ptr frame2, + std::vector &matches, + std::vector &points1, std::vector &points2, int thres = 8) + { + std::vector bf_matches; + matcher_->match(frame1->descriptors_, frame2->descriptors_, bf_matches); + + auto min_max = minmax_element(bf_matches.begin(), bf_matches.end(), + [](const cv::DMatch &m1, const cv::DMatch &m2) + { return m1.distance < m2.distance; }); + double min_dist = min_max.first->distance; + double max_dist = min_max.second->distance; + + for (auto m : bf_matches) + { + + if (m.distance > std::max(2 * min_dist, 60.0)) + continue; + matches.push_back(m); + points1.emplace_back(frame1->features_[m.queryIdx]->position_.pt.x, frame1->features_[m.queryIdx]->position_.pt.y); + points2.emplace_back(frame2->features_[m.trainIdx]->position_.pt.x, frame2->features_[m.trainIdx]->position_.pt.y); + } + if (matches.size() < thres) + return false; + return true; + } + + bool Frontend::Trangulation(Frame::Ptr &frame1, Frame::Ptr frame2, + std::vector &matches, + std::vector &points1, std::vector &points2) + { + if (matches.size() == 0) + return true; + + cv::Mat P1, P2; + Mat34f P1_e = camera_->K() * frame1->RT(); + Mat34f P2_e = camera_->K() * frame2->RT(); + cv::eigen2cv(P1_e, P1); + cv::eigen2cv(P2_e, P2); + + // std::cout << "-----first-----\n"<< P1<< std::endl; + // std::cout << "----current----\n"<< P2 << std::endl; + + cv::Mat pointsH(1, matches.size(), CV_32FC4); + cv::Mat points3F; + + cv::triangulatePoints(P1, P2, points1, points2, pointsH); + cv::convertPointsFromHomogeneous(pointsH.t(), points3F); + // std::cout << "----map_points 
3F----\n" << points3F.rows << " " << points3F.cols << " " << points3F.channels() << " " << points3F.type() << std::endl; + + SE3 pose_Tcw = frame1->Pose().inverse(); + for (int i = 0; i < matches.size(); i++) + { + if (points3F.at(i, 0)[2] <= 0) + continue; + + Vec3 pworld = Vec3(points3F.at(i, 0)[0], points3F.at(i, 0)[1], points3F.at(i, 0)[2]); + + auto new_map_point = MapPoint::CreateNewMappoint(); + pworld = pose_Tcw * pworld; + new_map_point->SetPos(pworld); + new_map_point->AddObservation(frame1->features_[matches[i].queryIdx]); + new_map_point->AddObservation(frame2->features_[matches[i].trainIdx]); + + frame1->features_[matches[i].queryIdx]->map_point_ = new_map_point; + frame2->features_[matches[i].trainIdx]->map_point_ = new_map_point; + frame2->features_[matches[i].trainIdx]->status_ = STATUS::MATCH2D; + + map_->InsertMapPoint(new_map_point); + } + std::cout << "Trangulation " << matches.size() << " new mappoints\n"; + return true; + } + + bool Frontend::EstimateWithMatches(Frame::Ptr &frame1, Frame::Ptr frame2, + std::vector &points1, std::vector &points2) + { + cv::Mat K = camera_->K_cv(); + cv::Mat E, R, t, r; + E = cv::findEssentialMat(points1, points2, K, cv::RANSAC, 0.999, 1.0); + cv::recoverPose(E, points1, points2, K, R, t); + // cv::Rodrigues(r, R); + + Mat33 R_e; + Vec3 t_e; + cv::cv2eigen(R, R_e); + cv::cv2eigen(t, t_e); + SE3 pose(R_e, t_e); + + frame2->SetPose(pose * frame1->Pose()); + + // std::cout << "------R------\n"< matches; + std::vector points1, points2; + //!-----------------------Match---------------------------- + if (!Match2D2D(frame1, frame2, matches, points1, points2)) + return false; + + //!-----------------------Estimate---------------------------- + EstimateWithMatches(frame1, frame2, points1, points2); + + //!-----------------------Trangulation & Build Map From 2D-2D Matches---------------------------- + Trangulation(frame1, frame2, matches, points1, points2); + + //!-----------------------Insert KeyFrame--------------------- + first_frame_->SetKeyFrame(); + current_frame_->SetKeyFrame(); + map_->InsertKeyFrame(first_frame_); + map_->InsertKeyFrame(current_frame_); + return true; } diff --git a/src/map.cpp b/src/map.cpp index 5372e9b..5d02b40 100644 --- a/src/map.cpp +++ b/src/map.cpp @@ -58,7 +58,7 @@ void Map::RemoveOldKeyframe() { frame_to_remove = keyframes_.at(max_kf_id); } - std::cout << "remove keyframe " << frame_to_remove->keyframe_id_ << std::endl; + // std::cout << "remove keyframe " << frame_to_remove->keyframe_id_ << std::endl; // remove keyframe and landmark observation active_keyframes_.erase(frame_to_remove->keyframe_id_); for (auto feat : frame_to_remove->features_) { @@ -82,7 +82,7 @@ void Map::CleanMap() { ++iter; } } - std::cout << "Removed " << cnt_landmark_removed << " active landmarks \n"; + // std::cout << "Removed " << cnt_landmark_removed << " active landmarks \n"; } } // namespace ECT_SLAM diff --git a/src/orb.cpp b/src/orb.cpp deleted file mode 100644 index bfde35d..0000000 --- a/src/orb.cpp +++ /dev/null @@ -1,353 +0,0 @@ -#include "orb.hpp" -using namespace std; - -// -------------------------------------------------------------------------------------------------- // -// ORB pattern -int ORB_pattern[256 * 4] = { - 8, -3, 9, 5 /*mean (0), correlation (0)*/, - 4, 2, 7, -12 /*mean (1.12461e-05), correlation (0.0437584)*/, - -11, 9, -8, 2 /*mean (3.37382e-05), correlation (0.0617409)*/, - 7, -12, 12, -13 /*mean (5.62303e-05), correlation (0.0636977)*/, - 2, -13, 2, 12 /*mean (0.000134953), correlation (0.085099)*/, - 1, 
-7, 1, 6 /*mean (0.000528565), correlation (0.0857175)*/, - -2, -10, -2, -4 /*mean (0.0188821), correlation (0.0985774)*/, - -13, -13, -11, -8 /*mean (0.0363135), correlation (0.0899616)*/, - -13, -3, -12, -9 /*mean (0.121806), correlation (0.099849)*/, - 10, 4, 11, 9 /*mean (0.122065), correlation (0.093285)*/, - -13, -8, -8, -9 /*mean (0.162787), correlation (0.0942748)*/, - -11, 7, -9, 12 /*mean (0.21561), correlation (0.0974438)*/, - 7, 7, 12, 6 /*mean (0.160583), correlation (0.130064)*/, - -4, -5, -3, 0 /*mean (0.228171), correlation (0.132998)*/, - -13, 2, -12, -3 /*mean (0.00997526), correlation (0.145926)*/, - -9, 0, -7, 5 /*mean (0.198234), correlation (0.143636)*/, - 12, -6, 12, -1 /*mean (0.0676226), correlation (0.16689)*/, - -3, 6, -2, 12 /*mean (0.166847), correlation (0.171682)*/, - -6, -13, -4, -8 /*mean (0.101215), correlation (0.179716)*/, - 11, -13, 12, -8 /*mean (0.200641), correlation (0.192279)*/, - 4, 7, 5, 1 /*mean (0.205106), correlation (0.186848)*/, - 5, -3, 10, -3 /*mean (0.234908), correlation (0.192319)*/, - 3, -7, 6, 12 /*mean (0.0709964), correlation (0.210872)*/, - -8, -7, -6, -2 /*mean (0.0939834), correlation (0.212589)*/, - -2, 11, -1, -10 /*mean (0.127778), correlation (0.20866)*/, - -13, 12, -8, 10 /*mean (0.14783), correlation (0.206356)*/, - -7, 3, -5, -3 /*mean (0.182141), correlation (0.198942)*/, - -4, 2, -3, 7 /*mean (0.188237), correlation (0.21384)*/, - -10, -12, -6, 11 /*mean (0.14865), correlation (0.23571)*/, - 5, -12, 6, -7 /*mean (0.222312), correlation (0.23324)*/, - 5, -6, 7, -1 /*mean (0.229082), correlation (0.23389)*/, - 1, 0, 4, -5 /*mean (0.241577), correlation (0.215286)*/, - 9, 11, 11, -13 /*mean (0.00338507), correlation (0.251373)*/, - 4, 7, 4, 12 /*mean (0.131005), correlation (0.257622)*/, - 2, -1, 4, 4 /*mean (0.152755), correlation (0.255205)*/, - -4, -12, -2, 7 /*mean (0.182771), correlation (0.244867)*/, - -8, -5, -7, -10 /*mean (0.186898), correlation (0.23901)*/, - 4, 11, 9, 12 /*mean (0.226226), correlation (0.258255)*/, - 0, -8, 1, -13 /*mean (0.0897886), correlation (0.274827)*/, - -13, -2, -8, 2 /*mean (0.148774), correlation (0.28065)*/, - -3, -2, -2, 3 /*mean (0.153048), correlation (0.283063)*/, - -6, 9, -4, -9 /*mean (0.169523), correlation (0.278248)*/, - 8, 12, 10, 7 /*mean (0.225337), correlation (0.282851)*/, - 0, 9, 1, 3 /*mean (0.226687), correlation (0.278734)*/, - 7, -5, 11, -10 /*mean (0.00693882), correlation (0.305161)*/, - -13, -6, -11, 0 /*mean (0.0227283), correlation (0.300181)*/, - 10, 7, 12, 1 /*mean (0.125517), correlation (0.31089)*/, - -6, -3, -6, 12 /*mean (0.131748), correlation (0.312779)*/, - 10, -9, 12, -4 /*mean (0.144827), correlation (0.292797)*/, - -13, 8, -8, -12 /*mean (0.149202), correlation (0.308918)*/, - -13, 0, -8, -4 /*mean (0.160909), correlation (0.310013)*/, - 3, 3, 7, 8 /*mean (0.177755), correlation (0.309394)*/, - 5, 7, 10, -7 /*mean (0.212337), correlation (0.310315)*/, - -1, 7, 1, -12 /*mean (0.214429), correlation (0.311933)*/, - 3, -10, 5, 6 /*mean (0.235807), correlation (0.313104)*/, - 2, -4, 3, -10 /*mean (0.00494827), correlation (0.344948)*/, - -13, 0, -13, 5 /*mean (0.0549145), correlation (0.344675)*/, - -13, -7, -12, 12 /*mean (0.103385), correlation (0.342715)*/, - -13, 3, -11, 8 /*mean (0.134222), correlation (0.322922)*/, - -7, 12, -4, 7 /*mean (0.153284), correlation (0.337061)*/, - 6, -10, 12, 8 /*mean (0.154881), correlation (0.329257)*/, - -9, -1, -7, -6 /*mean (0.200967), correlation (0.33312)*/, - -2, -5, 0, 12 /*mean (0.201518), correlation 
(0.340635)*/,
-    -12, 5, -7, 5 /*mean (0.207805), correlation (0.335631)*/,
-    3, -10, 8, -13 /*mean (0.224438), correlation (0.34504)*/,
-    -7, -7, -4, 5 /*mean (0.239361), correlation (0.338053)*/,
-    -3, -2, -1, -7 /*mean (0.240744), correlation (0.344322)*/,
-    2, 9, 5, -11 /*mean (0.242949), correlation (0.34145)*/,
-    -11, -13, -5, -13 /*mean (0.244028), correlation (0.336861)*/,
-    -1, 6, 0, -1 /*mean (0.247571), correlation (0.343684)*/,
-    5, -3, 5, 2 /*mean (0.000697256), correlation (0.357265)*/,
-    -4, -13, -4, 12 /*mean (0.00213675), correlation (0.373827)*/,
-    -9, -6, -9, 6 /*mean (0.0126856), correlation (0.373938)*/,
-    -12, -10, -8, -4 /*mean (0.0152497), correlation (0.364237)*/,
-    10, 2, 12, -3 /*mean (0.0299933), correlation (0.345292)*/,
-    7, 12, 12, 12 /*mean (0.0307242), correlation (0.366299)*/,
-    -7, -13, -6, 5 /*mean (0.0534975), correlation (0.368357)*/,
-    -4, 9, -3, 4 /*mean (0.099865), correlation (0.372276)*/,
-    7, -1, 12, 2 /*mean (0.117083), correlation (0.364529)*/,
-    -7, 6, -5, 1 /*mean (0.126125), correlation (0.369606)*/,
-    -13, 11, -12, 5 /*mean (0.130364), correlation (0.358502)*/,
-    -3, 7, -2, -6 /*mean (0.131691), correlation (0.375531)*/,
-    7, -8, 12, -7 /*mean (0.160166), correlation (0.379508)*/,
-    -13, -7, -11, -12 /*mean (0.167848), correlation (0.353343)*/,
-    1, -3, 12, 12 /*mean (0.183378), correlation (0.371916)*/,
-    2, -6, 3, 0 /*mean (0.228711), correlation (0.371761)*/,
-    -4, 3, -2, -13 /*mean (0.247211), correlation (0.364063)*/,
-    -1, -13, 1, 9 /*mean (0.249325), correlation (0.378139)*/,
-    7, 1, 8, -6 /*mean (0.000652272), correlation (0.411682)*/,
-    1, -1, 3, 12 /*mean (0.00248538), correlation (0.392988)*/,
-    9, 1, 12, 6 /*mean (0.0206815), correlation (0.386106)*/,
-    -1, -9, -1, 3 /*mean (0.0364485), correlation (0.410752)*/,
-    -13, -13, -10, 5 /*mean (0.0376068), correlation (0.398374)*/,
-    7, 7, 10, 12 /*mean (0.0424202), correlation (0.405663)*/,
-    12, -5, 12, 9 /*mean (0.0942645), correlation (0.410422)*/,
-    6, 3, 7, 11 /*mean (0.1074), correlation (0.413224)*/,
-    5, -13, 6, 10 /*mean (0.109256), correlation (0.408646)*/,
-    2, -12, 2, 3 /*mean (0.131691), correlation (0.416076)*/,
-    3, 8, 4, -6 /*mean (0.165081), correlation (0.417569)*/,
-    2, 6, 12, -13 /*mean (0.171874), correlation (0.408471)*/,
-    9, -12, 10, 3 /*mean (0.175146), correlation (0.41296)*/,
-    -8, 4, -7, 9 /*mean (0.183682), correlation (0.402956)*/,
-    -11, 12, -4, -6 /*mean (0.184672), correlation (0.416125)*/,
-    1, 12, 2, -8 /*mean (0.191487), correlation (0.386696)*/,
-    6, -9, 7, -4 /*mean (0.192668), correlation (0.394771)*/,
-    2, 3, 3, -2 /*mean (0.200157), correlation (0.408303)*/,
-    6, 3, 11, 0 /*mean (0.204588), correlation (0.411762)*/,
-    3, -3, 8, -8 /*mean (0.205904), correlation (0.416294)*/,
-    7, 8, 9, 3 /*mean (0.213237), correlation (0.409306)*/,
-    -11, -5, -6, -4 /*mean (0.243444), correlation (0.395069)*/,
-    -10, 11, -5, 10 /*mean (0.247672), correlation (0.413392)*/,
-    -5, -8, -3, 12 /*mean (0.24774), correlation (0.411416)*/,
-    -10, 5, -9, 0 /*mean (0.00213675), correlation (0.454003)*/,
-    8, -1, 12, -6 /*mean (0.0293635), correlation (0.455368)*/,
-    4, -6, 6, -11 /*mean (0.0404971), correlation (0.457393)*/,
-    -10, 12, -8, 7 /*mean (0.0481107), correlation (0.448364)*/,
-    4, -2, 6, 7 /*mean (0.050641), correlation (0.455019)*/,
-    -2, 0, -2, 12 /*mean (0.0525978), correlation (0.44338)*/,
-    -5, -8, -5, 2 /*mean (0.0629667), correlation (0.457096)*/,
-    7, -6, 10, 12 /*mean (0.0653846), correlation (0.445623)*/,
-    -9, -13, -8, -8 /*mean (0.0858749), correlation (0.449789)*/,
-    -5, -13, -5, -2 /*mean (0.122402), correlation (0.450201)*/,
-    8, -8, 9, -13 /*mean (0.125416), correlation (0.453224)*/,
-    -9, -11, -9, 0 /*mean (0.130128), correlation (0.458724)*/,
-    1, -8, 1, -2 /*mean (0.132467), correlation (0.440133)*/,
-    7, -4, 9, 1 /*mean (0.132692), correlation (0.454)*/,
-    -2, 1, -1, -4 /*mean (0.135695), correlation (0.455739)*/,
-    11, -6, 12, -11 /*mean (0.142904), correlation (0.446114)*/,
-    -12, -9, -6, 4 /*mean (0.146165), correlation (0.451473)*/,
-    3, 7, 7, 12 /*mean (0.147627), correlation (0.456643)*/,
-    5, 5, 10, 8 /*mean (0.152901), correlation (0.455036)*/,
-    0, -4, 2, 8 /*mean (0.167083), correlation (0.459315)*/,
-    -9, 12, -5, -13 /*mean (0.173234), correlation (0.454706)*/,
-    0, 7, 2, 12 /*mean (0.18312), correlation (0.433855)*/,
-    -1, 2, 1, 7 /*mean (0.185504), correlation (0.443838)*/,
-    5, 11, 7, -9 /*mean (0.185706), correlation (0.451123)*/,
-    3, 5, 6, -8 /*mean (0.188968), correlation (0.455808)*/,
-    -13, -4, -8, 9 /*mean (0.191667), correlation (0.459128)*/,
-    -5, 9, -3, -3 /*mean (0.193196), correlation (0.458364)*/,
-    -4, -7, -3, -12 /*mean (0.196536), correlation (0.455782)*/,
-    6, 5, 8, 0 /*mean (0.1972), correlation (0.450481)*/,
-    -7, 6, -6, 12 /*mean (0.199438), correlation (0.458156)*/,
-    -13, 6, -5, -2 /*mean (0.211224), correlation (0.449548)*/,
-    1, -10, 3, 10 /*mean (0.211718), correlation (0.440606)*/,
-    4, 1, 8, -4 /*mean (0.213034), correlation (0.443177)*/,
-    -2, -2, 2, -13 /*mean (0.234334), correlation (0.455304)*/,
-    2, -12, 12, 12 /*mean (0.235684), correlation (0.443436)*/,
-    -2, -13, 0, -6 /*mean (0.237674), correlation (0.452525)*/,
-    4, 1, 9, 3 /*mean (0.23962), correlation (0.444824)*/,
-    -6, -10, -3, -5 /*mean (0.248459), correlation (0.439621)*/,
-    -3, -13, -1, 1 /*mean (0.249505), correlation (0.456666)*/,
-    7, 5, 12, -11 /*mean (0.00119208), correlation (0.495466)*/,
-    4, -2, 5, -7 /*mean (0.00372245), correlation (0.484214)*/,
-    -13, 9, -9, -5 /*mean (0.00741116), correlation (0.499854)*/,
-    7, 1, 8, 6 /*mean (0.0208952), correlation (0.499773)*/,
-    7, -8, 7, 6 /*mean (0.0220085), correlation (0.501609)*/,
-    -7, -4, -7, 1 /*mean (0.0233806), correlation (0.496568)*/,
-    -8, 11, -7, -8 /*mean (0.0236505), correlation (0.489719)*/,
-    -13, 6, -12, -8 /*mean (0.0268781), correlation (0.503487)*/,
-    2, 4, 3, 9 /*mean (0.0323324), correlation (0.501938)*/,
-    10, -5, 12, 3 /*mean (0.0399235), correlation (0.494029)*/,
-    -6, -5, -6, 7 /*mean (0.0420153), correlation (0.486579)*/,
-    8, -3, 9, -8 /*mean (0.0548021), correlation (0.484237)*/,
-    2, -12, 2, 8 /*mean (0.0616622), correlation (0.496642)*/,
-    -11, -2, -10, 3 /*mean (0.0627755), correlation (0.498563)*/,
-    -12, -13, -7, -9 /*mean (0.0829622), correlation (0.495491)*/,
-    -11, 0, -10, -5 /*mean (0.0843342), correlation (0.487146)*/,
-    5, -3, 11, 8 /*mean (0.0929937), correlation (0.502315)*/,
-    -2, -13, -1, 12 /*mean (0.113327), correlation (0.48941)*/,
-    -1, -8, 0, 9 /*mean (0.132119), correlation (0.467268)*/,
-    -13, -11, -12, -5 /*mean (0.136269), correlation (0.498771)*/,
-    -10, -2, -10, 11 /*mean (0.142173), correlation (0.498714)*/,
-    -3, 9, -2, -13 /*mean (0.144141), correlation (0.491973)*/,
-    2, -3, 3, 2 /*mean (0.14892), correlation (0.500782)*/,
-    -9, -13, -4, 0 /*mean (0.150371), correlation (0.498211)*/,
-    -4, 6, -3, -10 /*mean (0.152159), correlation (0.495547)*/,
-    -4, 12, -2, -7 /*mean (0.156152), correlation (0.496925)*/,
-    -6, -11, -4, 9 /*mean (0.15749), correlation (0.499222)*/,
-    6, -3, 6, 11 /*mean (0.159211), correlation (0.503821)*/,
-    -13, 11, -5, 5 /*mean (0.162427), correlation (0.501907)*/,
-    11, 11, 12, 6 /*mean (0.16652), correlation (0.497632)*/,
-    7, -5, 12, -2 /*mean (0.169141), correlation (0.484474)*/,
-    -1, 12, 0, 7 /*mean (0.169456), correlation (0.495339)*/,
-    -4, -8, -3, -2 /*mean (0.171457), correlation (0.487251)*/,
-    -7, 1, -6, 7 /*mean (0.175), correlation (0.500024)*/,
-    -13, -12, -8, -13 /*mean (0.175866), correlation (0.497523)*/,
-    -7, -2, -6, -8 /*mean (0.178273), correlation (0.501854)*/,
-    -8, 5, -6, -9 /*mean (0.181107), correlation (0.494888)*/,
-    -5, -1, -4, 5 /*mean (0.190227), correlation (0.482557)*/,
-    -13, 7, -8, 10 /*mean (0.196739), correlation (0.496503)*/,
-    1, 5, 5, -13 /*mean (0.19973), correlation (0.499759)*/,
-    1, 0, 10, -13 /*mean (0.204465), correlation (0.49873)*/,
-    9, 12, 10, -1 /*mean (0.209334), correlation (0.49063)*/,
-    5, -8, 10, -9 /*mean (0.211134), correlation (0.503011)*/,
-    -1, 11, 1, -13 /*mean (0.212), correlation (0.499414)*/,
-    -9, -3, -6, 2 /*mean (0.212168), correlation (0.480739)*/,
-    -1, -10, 1, 12 /*mean (0.212731), correlation (0.502523)*/,
-    -13, 1, -8, -10 /*mean (0.21327), correlation (0.489786)*/,
-    8, -11, 10, -6 /*mean (0.214159), correlation (0.488246)*/,
-    2, -13, 3, -6 /*mean (0.216993), correlation (0.50287)*/,
-    7, -13, 12, -9 /*mean (0.223639), correlation (0.470502)*/,
-    -10, -10, -5, -7 /*mean (0.224089), correlation (0.500852)*/,
-    -10, -8, -8, -13 /*mean (0.228666), correlation (0.502629)*/,
-    4, -6, 8, 5 /*mean (0.22906), correlation (0.498305)*/,
-    3, 12, 8, -13 /*mean (0.233378), correlation (0.503825)*/,
-    -4, 2, -3, -3 /*mean (0.234323), correlation (0.476692)*/,
-    5, -13, 10, -12 /*mean (0.236392), correlation (0.475462)*/,
-    4, -13, 5, -1 /*mean (0.236842), correlation (0.504132)*/,
-    -9, 9, -4, 3 /*mean (0.236977), correlation (0.497739)*/,
-    0, 3, 3, -9 /*mean (0.24314), correlation (0.499398)*/,
-    -12, 1, -6, 1 /*mean (0.243297), correlation (0.489447)*/,
-    3, 2, 4, -8 /*mean (0.00155196), correlation (0.553496)*/,
-    -10, -10, -10, 9 /*mean (0.00239541), correlation (0.54297)*/,
-    8, -13, 12, 12 /*mean (0.0034413), correlation (0.544361)*/,
-    -8, -12, -6, -5 /*mean (0.003565), correlation (0.551225)*/,
-    2, 2, 3, 7 /*mean (0.00835583), correlation (0.55285)*/,
-    10, 6, 11, -8 /*mean (0.00885065), correlation (0.540913)*/,
-    6, 8, 8, -12 /*mean (0.0101552), correlation (0.551085)*/,
-    -7, 10, -6, 5 /*mean (0.0102227), correlation (0.533635)*/,
-    -3, -9, -3, 9 /*mean (0.0110211), correlation (0.543121)*/,
-    -1, -13, -1, 5 /*mean (0.0113473), correlation (0.550173)*/,
-    -3, -7, -3, 4 /*mean (0.0140913), correlation (0.554774)*/,
-    -8, -2, -8, 3 /*mean (0.017049), correlation (0.55461)*/,
-    4, 2, 12, 12 /*mean (0.01778), correlation (0.546921)*/,
-    2, -5, 3, 11 /*mean (0.0224022), correlation (0.549667)*/,
-    6, -9, 11, -13 /*mean (0.029161), correlation (0.546295)*/,
-    3, -1, 7, 12 /*mean (0.0303081), correlation (0.548599)*/,
-    11, -1, 12, 4 /*mean (0.0355151), correlation (0.523943)*/,
-    -3, 0, -3, 6 /*mean (0.0417904), correlation (0.543395)*/,
-    4, -11, 4, 12 /*mean (0.0487292), correlation (0.542818)*/,
-    2, -4, 2, 1 /*mean (0.0575124), correlation (0.554888)*/,
-    -10, -6, -8, 1 /*mean (0.0594242), correlation (0.544026)*/,
-    -13, 7, -11, 1 /*mean (0.0597391), correlation (0.550524)*/,
-    -13, 12, -11, -13 /*mean (0.0608974), correlation (0.55383)*/,
-    6, 0, 11, -13 /*mean (0.065126), correlation (0.552006)*/,
-    0, -1, 1, 4 /*mean (0.074224), correlation (0.546372)*/,
-    -13, 3, -9, -2 /*mean (0.0808592), correlation (0.554875)*/,
-    -9, 8, -6, -3 /*mean (0.0883378), correlation (0.551178)*/,
-    -13, -6, -8, -2 /*mean (0.0901035), correlation (0.548446)*/,
-    5, -9, 8, 10 /*mean (0.0949843), correlation (0.554694)*/,
-    2, 7, 3, -9 /*mean (0.0994152), correlation (0.550979)*/,
-    -1, -6, -1, -1 /*mean (0.10045), correlation (0.552714)*/,
-    9, 5, 11, -2 /*mean (0.100686), correlation (0.552594)*/,
-    11, -3, 12, -8 /*mean (0.101091), correlation (0.532394)*/,
-    3, 0, 3, 5 /*mean (0.101147), correlation (0.525576)*/,
-    -1, 4, 0, 10 /*mean (0.105263), correlation (0.531498)*/,
-    3, -6, 4, 5 /*mean (0.110785), correlation (0.540491)*/,
-    -13, 0, -10, 5 /*mean (0.112798), correlation (0.536582)*/,
-    5, 8, 12, 11 /*mean (0.114181), correlation (0.555793)*/,
-    8, 9, 9, -6 /*mean (0.117431), correlation (0.553763)*/,
-    7, -4, 8, -12 /*mean (0.118522), correlation (0.553452)*/,
-    -10, 4, -10, 9 /*mean (0.12094), correlation (0.554785)*/,
-    7, 3, 12, 4 /*mean (0.122582), correlation (0.555825)*/,
-    9, -7, 10, -2 /*mean (0.124978), correlation (0.549846)*/,
-    7, 0, 12, -2 /*mean (0.127002), correlation (0.537452)*/,
-    -1, -6, 0, -11 /*mean (0.127148), correlation (0.547401)*/
-};
-
-// compute the descriptor
-void ComputeORB(const cv::Mat &img, vector<cv::KeyPoint> &keypoints, vector<DescType> &descriptors)
-{
-    const int half_patch_size = 8;
-    const int half_boundary = 16;
-    int bad_points = 0;
-    for (auto &kp : keypoints)
-    {
-        if (kp.pt.x < half_boundary || kp.pt.y < half_boundary ||
-            kp.pt.x >= img.cols - half_boundary || kp.pt.y >= img.rows - half_boundary)
-        {
-            // outside
-            bad_points++;
-            descriptors.push_back({});
-            continue;
-        }
-
-        float m01 = 0, m10 = 0;
-        for (int dx = -half_patch_size; dx < half_patch_size; ++dx)
-        {
-            for (int dy = -half_patch_size; dy < half_patch_size; ++dy)
-            {
-                uchar pixel = img.at<uchar>(kp.pt.y + dy, kp.pt.x + dx);
-                m10 += dx * pixel;
-                m01 += dy * pixel;
-            }
-        }
-
-        // angle should be arc tan(m01/m10);
-        float m_sqrt = sqrt(m01 * m01 + m10 * m10) + 1e-18; // avoid divide by zero
-        float sin_theta = m01 / m_sqrt;
-        float cos_theta = m10 / m_sqrt;
-
-        // compute the angle of this point
-        DescType desc(8, 0);
-        for (int i = 0; i < 8; i++)
-        {
-            uint32_t d = 0;
-            for (int k = 0; k < 32; k++)
-            {
-                int idx_pq = i * 32 + k;
-                cv::Point2f p(ORB_pattern[idx_pq * 4], ORB_pattern[idx_pq * 4 + 1]);
-                cv::Point2f q(ORB_pattern[idx_pq * 4 + 2], ORB_pattern[idx_pq * 4 + 3]);
-
-                // rotate with theta
-                cv::Point2f pp = cv::Point2f(cos_theta * p.x - sin_theta * p.y, sin_theta * p.x + cos_theta * p.y) + kp.pt;
-                cv::Point2f qq = cv::Point2f(cos_theta * q.x - sin_theta * q.y, sin_theta * q.x + cos_theta * q.y) + kp.pt;
-                if (img.at<uchar>(pp.y, pp.x) < img.at<uchar>(qq.y, qq.x))
-                {
-                    d |= 1 << k;
-                }
-            }
-            desc[i] = d;
-        }
-        descriptors.push_back(desc);
-    }
-}
-
-// brute-force matching
-void BfMatch(const vector<DescType> &desc1, const vector<DescType> &desc2, vector<cv::DMatch> &matches)
-{
-    const int d_max = 40;
-
-    for (size_t i1 = 0; i1 < desc1.size(); ++i1)
-    {
-        if (desc1[i1].empty())
-            continue;
-        cv::DMatch m{i1, 0, 256};
-        for (size_t i2 = 0; i2 < desc2.size(); ++i2)
-        {
-            if (desc2[i2].empty())
-                continue;
-            int distance = 0;
-            for (int k = 0; k < 8; k++)
-            {
-                distance += _mm_popcnt_u32(desc1[i1][k] ^ desc2[i2][k]);
-            }
-            if (distance < d_max && distance < m.distance)
-            {
-                m.distance = distance;
-                m.trainIdx = i2;
-            }
-        }
-        if (m.distance < d_max)
-        {
-            matches.push_back(m);
-        }
-    }
-}
\ No newline at end of file
diff --git a/src/system.cpp b/src/system.cpp
index 12e5af0..5ea3f37 100644
--- a/src/system.cpp
+++ b/src/system.cpp
@@ -3,6 +3,9 @@
 #include
 #include
 #include
+#include
+#include
+#include
 #include
 #include
 #include
@@ -36,7 +39,7 @@ namespace ECT_SLAM
         map_ = Map::Ptr(new Map);
         viewer_ = Viewer::Ptr(new Viewer);
         camera_ = Camera::Ptr(new Camera(Config::Get<double>("camera.fx"), Config::Get<double>("camera.fy"),
-                                           Config::Get<double>("camera.cx"), Config::Get<double>("camera.cy")));
+                                         Config::Get<double>("camera.cx"), Config::Get<double>("camera.cy")));
 
         frontend_->SetBackend(backend_);
         frontend_->SetMap(map_);
@@ -46,15 +49,25 @@
         backend_->SetMap(map_);
         backend_->SetCameras(camera_);
 
-        viewer_->SetMap(map_);
+        if (viewer_)
+            viewer_->SetMap(map_);
 
         return true;
     }
 
+    void System::ProcessImage(std::string & image_path, const double & timestamp){
+        cv::Mat image = cv::imread(image_path, 0);
+        cv::Mat dst;
+        cv::resize(image, dst, cv::Size(640, 480));
+        TrackMonocular(dst, timestamp);
+    }
+
     cv::Mat System::TrackMonocular(const cv::Mat &im, const double &timestamp)
     {
         auto new_frame = Frame::CreateFrame();
-        new_frame->img_ = im;
+        // cv::Ptr<cv::CLAHE> clahe = cv::createCLAHE();
+        cv::equalizeHist(im, new_frame->img_);
+        // clahe->apply(src, new_frame->img_);
         new_frame->time_stamp_ = timestamp;
         bool success = frontend_->AddFrame(new_frame);
         return im;
@@ -63,13 +76,39 @@
     void System::Shutdown()
     {
         backend_->Stop();
-        viewer_->Close();
+        if (viewer_)
+            viewer_->Close();
     }
 
     void System::SaveKeyFrameTrajectory(const std::string &filename)
    {
-        std::cout << std::endl
-                  << "Saving keyframe trajectory to " << filename << " ..." << std::endl;
+        std::cout << "\nSaving keyframe trajectory to " << filename << " ..." << std::endl;
+
+        Map::KeyframesType keyframes = map_->GetAllKeyFrames();
+
+        std::ofstream f;
+        f.open(filename.c_str());
+        f << std::fixed;
+
+        for (auto kf : keyframes)
+        {
+            double time = kf.second->time_stamp_;
+            SE3 pose = kf.second->Pose();
+            auto se3 = pose.log();
+            std::vector<double> rvec{se3(3, 0), se3(4, 0), se3(5, 0)};
+            std::vector<double> tvec{se3(0, 0), se3(1, 0), se3(2, 0)};
+
+            Eigen::Quaterniond qt;
+            qt = Eigen::AngleAxisd(rvec[0], Eigen::Vector3d::UnitZ()) *
+                 Eigen::AngleAxisd(rvec[1], Eigen::Vector3d::UnitY()) *
+                 Eigen::AngleAxisd(rvec[2], Eigen::Vector3d::UnitX());
+
+            f << time << " " << tvec[0] << " " << tvec[1] << " " << tvec[2] << " "
+              << qt.x() << " " << qt.y() << " " << qt.z() << " " << qt.w() << "\n";
+        }
+
+        f.close();
+        std::cout << "trajectory file close..." << std::endl;
     }
 
 } //namespace ECT_SLAM
diff --git a/src/viewer.cpp b/src/viewer.cpp
index ce4cade..f18b42b 100644
--- a/src/viewer.cpp
+++ b/src/viewer.cpp
@@ -6,22 +6,191 @@
 #include
 #include
 
-namespace ECT_SLAM {
+namespace ECT_SLAM
+{
 
-Viewer::Viewer() {
-    viewer_thread_ = std::thread(std::bind(&Viewer::ThreadLoop, this));
-}
 
-void Viewer::ThreadLoop() {
-    while (viewer_running_) {
-        usleep(5000);
+    Viewer::Viewer()
+    {
+        viewer_thread_ = std::thread(std::bind(&Viewer::ThreadLoop, this));
     }
-    std::cout << "Stop viewer\n";
-}
 
+    void Viewer::AddCurrentFrame(Frame::Ptr current_frame)
+    {
+        std::unique_lock<std::mutex> lck(viewer_data_mutex_);
+        current_frame_ = current_frame;
+    }
+
+    void Viewer::ThreadLoop()
+    {
+        pangolin::CreateWindowAndBind("ECT_SLAM", 1024, 768);
+        glEnable(GL_DEPTH_TEST);
+        glEnable(GL_BLEND);
+        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
+
+        pangolin::OpenGlRenderState vis_camera(
+            pangolin::ProjectionMatrix(1024, 768, 400, 400, 512, 384, 0.1, 1000),
+            pangolin::ModelViewLookAt(0, -5, -10, 0, 0, 0, 0.0, -1.0, 0.0));
+
+        // Add named OpenGL viewport to window and provide 3D Handler
+        pangolin::View &vis_display =
+            pangolin::CreateDisplay()
+                .SetBounds(0.0, 1.0, 0.0, 1.0, -1024.0f / 768.0f)
+                .SetHandler(new pangolin::Handler3D(vis_camera));
+
+        const float blue[3] = {0, 0, 1};
+        const float green[3] = {0, 1, 0};
+
+        while (!pangolin::ShouldQuit() && viewer_running_)
+        {
+            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
+            glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
+            vis_display.Activate(vis_camera);
+
+            std::unique_lock<std::mutex> lock(viewer_data_mutex_);
+            if (current_frame_)
+            {
+                DrawFrame(current_frame_, green);
+                FollowCurrentFrame(vis_camera);
+
+                cv::Mat img = PlotFrameImage();
+                cv::imshow("image", img);
+                cv::waitKey(1);
+            }
+
+            if (map_)
+            {
+                DrawMapPoints();
+            }
+
+            pangolin::FinishFrame();
+            usleep(5000);
+        }
+        std::cout << "Stop viewer\n";
+    }
+
+    void Viewer::FollowCurrentFrame(pangolin::OpenGlRenderState &vis_camera)
+    {
+        SE3 Twc = current_frame_->Pose().inverse();
+        pangolin::OpenGlMatrix m(Twc.matrix());
+        vis_camera.Follow(m, true);
+    }
+
+    cv::Mat Viewer::PlotFrameImage()
+    {
+        cv::Mat img_out;
+        auto cf = current_frame_;
+        cv::cvtColor(cf->img_, img_out, CV_GRAY2BGR);
+
+        int cnt = 0;
+        for (size_t i = 0; i < cf->features_.size(); ++i)
+        {
+            auto feat = cf->features_[i];
+            if (cf->features_[i]->status_ == STATUS::MATCH3D)
+            {
+                cv::circle(img_out, feat->position_.pt, 1, cv::Scalar(0, 250, 0), 2);
+            }
+            else
+            {
+                cnt++;
+                if(cf->features_[i]->status_ == STATUS::MATCH2D)
+                    cv::circle(img_out, feat->position_.pt, 1, cv::Scalar(0, 0, 250), 2);
+                else
+                    cv::circle(img_out, feat->position_.pt, 1, cv::Scalar(250, 100, 250), 2);
+            }
+        }
+
+        int size = cf->features_.size();
+        cv::String text = "features: " + std::to_string(size-cnt) + " / " + std::to_string(size);
+        cv::putText(img_out, text,
+                    cv::Point(30, 30), cv::FONT_HERSHEY_DUPLEX,
+                    1.0, CV_RGB(250, 250, 200), 1);
+
+        return img_out;
+    }
 
-void Viewer::Close() {
-    viewer_running_ = false;
-    viewer_thread_.join();
-}
+    void Viewer::DrawFrame(Frame::Ptr frame, const float *color)
+    {
+        SE3 Twc = frame->Pose().inverse();
+        const float sz = 1.0;
+        const int line_width = 2.0;
+        const float fx = 400;
+        const float fy = 400;
+        const float cx = 512;
+        const float cy = 384;
+        const float width = 1080;
+        const float height = 768;
+
+        glPushMatrix();
+
+        Sophus::Matrix4f m = Twc.matrix().template cast<float>();
+        glMultMatrixf((GLfloat *)m.data());
+
+        if (color == nullptr)
+        {
+            glColor3f(1, 0, 0);
+        }
+        else
+            glColor3f(color[0], color[1], color[2]);
+
+        glLineWidth(line_width);
+        glBegin(GL_LINES);
+        glVertex3f(0, 0, 0);
+        glVertex3f(sz * (0 - cx) / fx, sz * (0 - cy) / fy, sz);
+        glVertex3f(0, 0, 0);
+        glVertex3f(sz * (0 - cx) / fx, sz * (height - 1 - cy) / fy, sz);
+        glVertex3f(0, 0, 0);
+        glVertex3f(sz * (width - 1 - cx) / fx, sz * (height - 1 - cy) / fy, sz);
+        glVertex3f(0, 0, 0);
+        glVertex3f(sz * (width - 1 - cx) / fx, sz * (0 - cy) / fy, sz);
+
+        glVertex3f(sz * (width - 1 - cx) / fx, sz * (0 - cy) / fy, sz);
+        glVertex3f(sz * (width - 1 - cx) / fx, sz * (height - 1 - cy) / fy, sz);
+
+        glVertex3f(sz * (width - 1 - cx) / fx, sz * (height - 1 - cy) / fy, sz);
+        glVertex3f(sz * (0 - cx) / fx, sz * (height - 1 - cy) / fy, sz);
+
+        glVertex3f(sz * (0 - cx) / fx, sz * (height - 1 - cy) / fy, sz);
+        glVertex3f(sz * (0 - cx) / fx, sz * (0 - cy) / fy, sz);
+
+        glVertex3f(sz * (0 - cx) / fx, sz * (0 - cy) / fy, sz);
+        glVertex3f(sz * (width - 1 - cx) / fx, sz * (0 - cy) / fy, sz);
+
+        glEnd();
+        glPopMatrix();
+    }
+
+    void Viewer::DrawMapPoints()
+    {
+        const float red[3] = {1.0, 0, 0};
+        for (auto &kf : active_keyframes_)
+        {
+            DrawFrame(kf.second, red);
+        }
+
+        glPointSize(2);
+        glBegin(GL_POINTS);
+        for (auto &landmark : active_landmarks_)
+        {
+            auto pos = landmark.second->Pos();
+            glColor3f(red[0], red[1], red[2]);
+            glVertex3d(pos[0], pos[1], pos[2]);
+        }
+        glEnd();
+    }
+
+    void Viewer::Close()
+    {
+        viewer_running_ = false;
+        viewer_thread_.join();
+    }
+
+    void Viewer::UpdateMap()
+    {
+        std::unique_lock<std::mutex> lck(viewer_data_mutex_);
+        assert(map_ != nullptr);
+        active_keyframes_ = map_->GetActiveKeyFrames();
+        active_landmarks_ = map_->GetActiveMapPoints();
+        map_updated_ = true;
+    }
 
-} // namespace ECT_SLAM
+} // namespace ECT_SLAM