
Added option to compile without SSBA, using OpenCV's contrib bundle adjuster instead; see option USE_SSBA in the CMakeLists.txt.
1 parent d651432 commit 018e85ea274729f5499611b7cea024eef86cb66f @royshil committed Feb 2, 2013
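
Based on the CMakeLists.txt changes in this commit, configuring a build without SSBA would presumably look like "cmake -DUSE_SSBA=OFF <path-to-source>" (the exact invocation depends on your build layout), which falls back to OpenCV's contrib bundle adjuster; USE_SSBA defaults to ON, keeping the SSBA-3.0 path.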
@@ -46,6 +46,8 @@ link_directories(
add_definitions(${PCL_DEFINITIONS})
+set( CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D__SFM__DEBUG__" )
+
include_directories(
${FLTK_INCLUDE_DIR}
${PCL_INCLUDE_DIRS}
@@ -68,7 +68,11 @@ int main( int argc, char** argv )
// kpts2.push_back(KeyPoint(pts2[i],1));
// }
// }
- GetFundamentalMat(keypoints_1, keypoints_2, kpts1, kpts2, Fmatches);
+ GetFundamentalMat(keypoints_1, keypoints_2, kpts1, kpts2, Fmatches
+#ifdef __SFM__DEBUG__
+ , img_1, img_2
+#endif
+ );
drawMatches(img_1, keypoints_1, img_2, keypoints_2, Fmatches, img_keypoints, Scalar::all(-1), Scalar::all(-1), Mat(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
}
@@ -10,15 +10,21 @@
#include "BundleAdjuster.h"
#include "Common.h"
+using namespace cv;
+using namespace std;
+
+#ifndef HAVE_SSBA
+#include <opencv2/contrib/contrib.hpp>
+#endif
+
+#ifdef HAVE_SSBA
#define V3DLIB_ENABLE_SUITESPARSE
#include <Math/v3d_linear.h>
#include <Base/v3d_vrmlio.h>
#include <Geometry/v3d_metricbundle.h>
using namespace V3D;
-using namespace std;
-using namespace cv;
namespace
{
@@ -58,6 +64,8 @@ namespace
}
} // end namespace <>
+#endif
+
//count number of 2D measurements
int BundleAdjuster::Count2DMeasurements(const vector<CloudPoint>& pointcloud) {
int K = 0;
@@ -81,6 +89,12 @@ void BundleAdjuster::adjustBundle(vector<CloudPoint>& pointcloud,
cout << "N (cams) = " << N << " M (points) = " << M << " K (measurements) = " << K << endl;
+#ifdef HAVE_SSBA
+ /********************************************************************************/
+ /* Use SSBA-3.0 for sparse bundle adjustment */
+ /********************************************************************************/
+
+
StdDistortionFunction distortion;
 //convert camera intrinsics to BA datastructs
@@ -261,4 +275,98 @@ void BundleAdjuster::adjustBundle(vector<CloudPoint>& pointcloud,
cam_matrix.at<double>(1,1) = Knew[1][1];
cam_matrix.at<double>(1,2) = Knew[1][2];
}
+#else
+ /********************************************************************************/
+ /* Use OpenCV contrib module for sparse bundle adjustment */
+ /********************************************************************************/
+
+ vector<Point3d> points(M); // positions of points in global coordinate system (input and output)
+ vector<vector<Point2d> > imagePoints(N,vector<Point2d>(M)); // projections of 3d points for every camera
+ vector<vector<int> > visibility(N,vector<int>(M)); // visibility of 3d points for every camera
+ vector<Mat> cameraMatrix(N); // intrinsic matrices of all cameras (input and output)
+ vector<Mat> R(N); // rotation matrices of all cameras (input and output)
+ vector<Mat> T(N); // translation vector of all cameras (input and output)
+ vector<Mat> distCoeffs(N); // distortion coefficients of all cameras (input and output)
+
+ int num_global_cams = pointcloud[0].imgpt_for_img.size();
+ vector<int> global_cam_id_to_local_id(num_global_cams,-1);
+ vector<int> local_cam_id_to_global_id(N,-1);
+ int local_cam_count = 0;
+
+ for (int pt3d = 0; pt3d < pointcloud.size(); pt3d++) {
+ points[pt3d] = pointcloud[pt3d].pt;
+// imagePoints[pt3d].resize(N);
+// visibility[pt3d].resize(N);
+
+ for (int pt3d_img = 0; pt3d_img < num_global_cams; pt3d_img++) {
+ if (pointcloud[pt3d].imgpt_for_img[pt3d_img] >= 0) {
+ if (global_cam_id_to_local_id[pt3d_img] < 0)
+ {
+ local_cam_id_to_global_id[local_cam_count] = pt3d_img;
+ global_cam_id_to_local_id[pt3d_img] = local_cam_count++;
+ }
+
+ int local_cam_id = global_cam_id_to_local_id[pt3d_img];
+
+ //2d point
+ Point2d pt2d_for_pt3d_in_img = imgpts[pt3d_img][pointcloud[pt3d].imgpt_for_img[pt3d_img]].pt;
+ imagePoints[local_cam_id][pt3d] = pt2d_for_pt3d_in_img;
+
+ //visibility in this camera
+ visibility[local_cam_id][pt3d] = 1;
+ }
+ }
+ //2nd pass to mark not-founds
+ for (int pt3d_img = 0; pt3d_img < num_global_cams; pt3d_img++) {
+ if (pointcloud[pt3d].imgpt_for_img[pt3d_img] < 0) {
+ //see if this global camera is being used locally in the BA
+ vector<int>::iterator local_it = std::find(local_cam_id_to_global_id.begin(),local_cam_id_to_global_id.end(),pt3d_img);
+ if (local_it != local_cam_id_to_global_id.end()) {
+ //this camera is used, and its local id is:
+ int local_id = local_it - local_cam_id_to_global_id.begin();
+
+ if (local_id >= 0) {
+ imagePoints[local_id][pt3d] = Point2d(-1,-1); //TODO reproject point on this camera
+ visibility[local_id][pt3d] = 0;
+ }
+ }
+ }
+ }
+ }
+ for (int i=0; i<N; i++) {
+ cameraMatrix[i] = cam_matrix;
+
+ Matx34d& P = Pmats[local_cam_id_to_global_id[i]];
+
+ Mat_<double> camR(3,3),camT(3,1);
+ camR(0,0) = P(0,0); camR(0,1) = P(0,1); camR(0,2) = P(0,2); camT(0) = P(0,3);
+ camR(1,0) = P(1,0); camR(1,1) = P(1,1); camR(1,2) = P(1,2); camT(1) = P(1,3);
+ camR(2,0) = P(2,0); camR(2,1) = P(2,1); camR(2,2) = P(2,2); camT(2) = P(2,3);
+ R[i] = camR;
+ T[i] = camT;
+
+ distCoeffs[i] = Mat::zeros(4,1, CV_64FC1);
+ }
+
+ cout << "Adjust bundle... \n";
+ cv::LevMarqSparse::bundleAdjust(points,imagePoints,visibility,cameraMatrix,R,T,distCoeffs);
+ cout << "DONE\n";
+
+ //get the BAed points
+ for (int pt3d = 0; pt3d < pointcloud.size(); pt3d++) {
+ pointcloud[pt3d].pt = points[pt3d];
+ }
+
+ //get the BAed cameras
+ for (int i = 0; i < N; ++i)
+ {
+ Matx34d P;
+ P(0,0) = R[i].at<double>(0,0); P(0,1) = R[i].at<double>(0,1); P(0,2) = R[i].at<double>(0,2); P(0,3) = T[i].at<double>(0);
+ P(1,0) = R[i].at<double>(1,0); P(1,1) = R[i].at<double>(1,1); P(1,2) = R[i].at<double>(1,2); P(1,3) = T[i].at<double>(1);
+ P(2,0) = R[i].at<double>(2,0); P(2,1) = R[i].at<double>(2,1); P(2,2) = R[i].at<double>(2,2); P(2,3) = T[i].at<double>(2);
+
+ Pmats[local_cam_id_to_global_id[i]] = P;
+ }
+
+#endif
}
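
For orientation, here is a minimal, self-contained sketch of the container layout that cv::LevMarqSparse::bundleAdjust (OpenCV 2.x contrib module) expects, mirroring what the #else branch above builds; the intrinsics, poses, and point values below are made-up placeholders, not data from this project:

    #include <opencv2/core/core.hpp>
    #include <opencv2/contrib/contrib.hpp> // cv::LevMarqSparse
    #include <vector>

    int main() {
        const int N = 2; // cameras
        const int M = 3; // 3D points

        // 3D points in the global frame (refined in place)
        std::vector<cv::Point3d> points(M, cv::Point3d(0, 0, 5));
        // imagePoints[cam][pt] and visibility[cam][pt]: indexed by camera first, then point
        std::vector<std::vector<cv::Point2d> > imagePoints(N, std::vector<cv::Point2d>(M, cv::Point2d(320, 240)));
        std::vector<std::vector<int> >         visibility (N, std::vector<int>(M, 1));

        // per-camera intrinsics, pose and distortion (refined in place)
        std::vector<cv::Mat> cameraMatrix(N), R(N), T(N), distCoeffs(N);
        for (int i = 0; i < N; i++) {
            cameraMatrix[i] = (cv::Mat_<double>(3,3) << 500,0,320, 0,500,240, 0,0,1);
            R[i] = cv::Mat::eye(3, 3, CV_64F);    // placeholder rotation
            T[i] = cv::Mat::zeros(3, 1, CV_64F);  // placeholder translation
            T[i].at<double>(0) = 0.1 * i;         // small baseline between the two cameras
            distCoeffs[i] = cv::Mat::zeros(4, 1, CV_64F);
        }

        cv::LevMarqSparse::bundleAdjust(points, imagePoints, visibility,
                                        cameraMatrix, R, T, distCoeffs);
        return 0;
    }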
@@ -7,15 +7,20 @@ if(EIGEN_INCLUDE_DIRS) # if compiling with PCL, it will bring Eigen with it
endif()
set(SSBA_LIBRARY_DIR "${CMAKE_SOURCE_DIR}/../3rdparty/SSBA-3.0/build" CACHE PATH "Directory to find SSBA libs")
+set(USE_SSBA ON CACHE BOOL "Build using the SSBA-3.0 library?")
-link_directories(
- ${SSBA_LIBRARY_DIR}
- )
+if(USE_SSBA)
+ link_directories(
+ ${SSBA_LIBRARY_DIR}
+ )
-include_directories(
- ${SSBA_LIBRARY_DIR}/../
- )
+ include_directories(
+ ${SSBA_LIBRARY_DIR}/../
+ )
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DHAVE_SSBA")
+endif(USE_SSBA)
+set( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-sign-compare")
set( CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D__SFM__DEBUG__" )
# Core functions of SfM
@@ -40,15 +45,18 @@ add_library(SfMToyLibrary ${SFM_LIB_SOURCES} )
if(MSVC)
set_target_properties(SfMToyLibrary PROPERTIES COMPILE_FLAGS "/openmp")
- target_link_libraries(SfMToyLibrary V3D COLAMD )
+ if(USE_SSBA)
+ target_link_libraries(SfMToyLibrary V3D COLAMD )
+ endif(USE_SSBA)
endif()
if(APPLE)
- # default is to build SSBA as static libs...
- target_link_libraries(SfMToyLibrary
- "${SSBA_LIBRARY_DIR}/libV3D.a"
- "${SSBA_LIBRARY_DIR}/libCOLAMD.a"
- )
-
+ if(USE_SSBA)
+ # default is to build SSBA as static libs...
+ target_link_libraries(SfMToyLibrary
+ "${SSBA_LIBRARY_DIR}/libV3D.a"
+ "${SSBA_LIBRARY_DIR}/libCOLAMD.a"
+ )
+ endif(USE_SSBA)
set_target_properties(SfMToyLibrary PROPERTIES COMPILE_FLAGS "-fopenmp")
set_target_properties(SfMToyLibrary PROPERTIES LINK_FLAGS "-fopenmp")
endif()
@@ -365,9 +365,9 @@ bool FindCameraMatrices(const Mat& K,
double t = getTickCount();
Mat F = GetFundamentalMat(imgpts1,imgpts2,imgpts1_good,imgpts2_good,matches
-//#ifdef __SFM__DEBUG__
-// ,img_1,img_2
-//#endif
+#ifdef __SFM__DEBUG__
+ ,img_1,img_2
+#endif
);
if(matches.size() < 100) { // || ((double)imgpts1_good.size() / (double)imgpts1.size()) < 0.25
cerr << "not enough inliers after F matrix" << endl;
@@ -24,7 +24,7 @@ cv::Mat GetFundamentalMat( const std::vector<cv::KeyPoint>& imgpts1,
std::vector<cv::KeyPoint>& imgpts2_good,
std::vector<cv::DMatch>& matches
#ifdef __SFM__DEBUG__
- ,const Mat& img_1, const Mat& img_2
+ ,const cv::Mat& img_1, const cv::Mat& img_2
#endif
);
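
Since __SFM__DEBUG__ is now injected only through CMAKE_CXX_FLAGS_DEBUG, the extra img_1/img_2 parameters exist solely in debug builds; the declaration above and every call site (as in the hunks earlier in this commit) must be guarded identically, otherwise a non-debug build would see a mismatched signature and fail to compile.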
@@ -109,10 +109,12 @@ void MultiCameraPnP::GetBaseLineTriangulation() {
Pmats[m_second_view] = 0;
m_second_view++;
} else {
+ assert(new_triangulated[0].imgpt_for_img.size() > 0);
std::cout << "before triangulation: " << pcloud.size();
for (unsigned int j=0; j<add_to_cloud.size(); j++) {
- if(add_to_cloud[j] == 1)
+ if(add_to_cloud[j] == 1) {
pcloud.push_back(new_triangulated[j]);
+ }
}
std::cout << " after " << pcloud.size() << std::endl;
}
@@ -346,14 +348,21 @@ bool MultiCameraPnP::TriangulatePointsBetweenViews(
cout << "filtered out " << (new_triangulated.size() - new_triangulated_filtered.size()) << " high-error points" << endl;
- //all points filtered?
+ //all points filtered out?
if(new_triangulated_filtered.size() <= 0) return false;
- new_triangulated = new_triangulated_filtered;
-
+ //use filtered points now
+ new_triangulated.clear();
+ new_triangulated.insert(new_triangulated.begin(), new_triangulated_filtered.begin(), new_triangulated_filtered.end());
+ //use filtered matches
matches = new_matches;
+
+ //update the matches storage
+	matches_matrix[std::make_pair(older_view,working_view)] = new_matches; //just to make sure, remove if unnecessary
matches_matrix[std::make_pair(working_view,older_view)] = FlipMatches(new_matches);
+
+ //now, determine which points should be added to the cloud
+
add_to_cloud.clear();
add_to_cloud.resize(new_triangulated.size(),1);
int found_other_views_count = 0;
@@ -362,13 +371,14 @@ bool MultiCameraPnP::TriangulatePointsBetweenViews(
//scan new triangulated points, if they were already triangulated before - strengthen cloud
//#pragma omp parallel for num_threads(1)
for (int j = 0; j<new_triangulated.size(); j++) {
- new_triangulated[j].imgpt_for_img = std::vector<int>(imgs.size(),-1);
+ new_triangulated[j].imgpt_for_img.resize(imgs.size(),-1);
//matches[j] corresponds to new_triangulated[j]
//matches[j].queryIdx = point in <older_view>
//matches[j].trainIdx = point in <working_view>
new_triangulated[j].imgpt_for_img[older_view] = matches[j].queryIdx; //2D reference to <older_view>
new_triangulated[j].imgpt_for_img[working_view] = matches[j].trainIdx; //2D reference to <working_view>
+
bool found_in_other_view = false;
for (unsigned int view_ = 0; view_ < num_views; view_++) {
if(view_ != older_view) {
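
The switch from reassigning imgpt_for_img to resize()-ing it presumably matters because resize keeps any per-view indices already stored and only pads newly added slots with -1, whereas constructing a fresh vector would wipe them. A tiny sketch of the difference (values hypothetical):

    std::vector<int> refs;
    refs.push_back(7);                  // an existing 2D reference for some view
    refs.resize(4, -1);                 // -> {7, -1, -1, -1}: reference preserved
    refs = std::vector<int>(4, -1);     // -> {-1, -1, -1, -1}: reference lost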
