Skip to content
Browse files

debugging pipeline

  • Loading branch information...
1 parent 03c018b commit 1db33c88c4fdfc13c3767697d4d95a7eeaa264c9 @royshil committed Aug 1, 2012
View
4 BundleAdjuster.cpp
@@ -62,7 +62,7 @@ namespace
void BundleAdjuster::adjustBundle(vector<CloudPoint>& pointcloud,
const Mat& cam_matrix,
const std::vector<std::vector<cv::KeyPoint> >& imgpts,
- std::map<std::pair<int,int> ,cv::Matx34d>& Pmats
+ std::map<int ,cv::Matx34d>& Pmats
)
{
int N = Pmats.size(), M = pointcloud.size();
@@ -120,7 +120,7 @@ void BundleAdjuster::adjustBundle(vector<CloudPoint>& pointcloud,
Matrix3x3d R;
Vector3d T;
- Matx34d& P = Pmats[make_pair(0, i)];
+ Matx34d& P = Pmats[i];
R[0][0] = P(0,0); R[0][1] = P(0,1); R[0][2] = P(0,2); T[0] = P(0,3);
R[1][0] = P(1,0); R[1][1] = P(1,1); R[1][2] = P(1,2); T[1] = P(1,3);
View
2 BundleAdjuster.h
@@ -16,5 +16,5 @@ class BundleAdjuster {
void adjustBundle(std::vector<CloudPoint>& pointcloud,
const cv::Mat& cam_matrix,
const std::vector<std::vector<cv::KeyPoint> >& imgpts,
- std::map<std::pair<int,int> ,cv::Matx34d>& Pmats);
+ std::map<int ,cv::Matx34d>& Pmats);
};
View
30 CMakeLists.txt
@@ -34,18 +34,24 @@ set(FLTK_LIBRARIES fltk3 fltk3gl)
IF(APPLE)
set( COCOA_LIBS ${CMAKE_OSX_SYSROOT}/System/Library/Frameworks/Cocoa.framework )
- set( CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -F/usr/local/lib -L/opt/local/lib")
+ set( CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -F/usr/local/lib -L/opt/local/lib -fopenmp")
ENDIF(APPLE)
-include_directories(${PCL_INCLUDE_DIRS})
-link_directories(${PCL_LIBRARY_DIRS} ${FLTK_LIB_DIR})
-add_definitions(${PCL_DEFINITIONS})
-include_directories(${FLTK_INCLUDE_DIR})
-
set(SSBA_LIBRARY_DIR SSBA_LIBRARY_DIR-NOT_FOUND CACHE PATH "Directory to find SSBA libs")
-link_directories(${SSBA_LIBRARY_DIR})
-include_directories(${SSBA_LIBRARY_DIR}/../)
+link_directories(
+ ${PCL_LIBRARY_DIRS}
+ ${FLTK_LIB_DIR}
+ ${SSBA_LIBRARY_DIR}
+ )
+
+add_definitions(${PCL_DEFINITIONS})
+
+include_directories(
+ ${FLTK_INCLUDE_DIR}
+ ${PCL_INCLUDE_DIRS}
+ ${SSBA_LIBRARY_DIR}/../
+ )
set( CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D__SFM__DEBUG__" )
@@ -69,11 +75,16 @@ file(GLOB SFM_LIB_SOURCES
add_library(SfMToyLibrary ${SFM_LIB_SOURCES} )
target_link_libraries(SfMToyLibrary
V3D
- COLAMD)
+ COLAMD
+ )
if(MSVC)
set_target_properties(SfMToyLibrary PROPERTIES COMPILE_FLAGS "/openmp")
endif()
+if(APPLE)
+ set_target_properties(SfMToyLibrary PROPERTIES COMPILE_FLAGS "-fopenmp")
+ set_target_properties(SfMToyLibrary PROPERTIES LINK_FLAGS "-fopenmp")
+endif()
# GUI part
IF( NOT ${FLTK-NOTFOUND} )
@@ -103,6 +114,7 @@ target_link_libraries(SfMToyUI
${OpenCV_LIBS}
${PCL_LIBRARIES}
)
+
IF(APPLE)
target_link_libraries(SfMToyUI ${COCOA_LIBS})
View
34 Common.cpp
@@ -12,6 +12,10 @@
#include <opencv2/imgproc/imgproc.hpp>
#include <iostream>
+#ifndef WIN32
+#include <dirent.h>
+#endif
+
using namespace std;
using namespace cv;
@@ -112,15 +116,17 @@ bool hasEndingLower (string const &fullString_, string const &ending)
}
}
-#ifndef WIN32
-//open a directory the POSIX way
-#include <dirent.h>
-
-void open_imgs_dir(char* dir_name, std::vector<cv::Mat>& images, std::vector<std::string>& images_names) {
+void open_imgs_dir(char* dir_name, std::vector<cv::Mat>& images, std::vector<std::string>& images_names, double downscale_factor) {
if (dir_name == NULL) {
return;
}
+
+ string dir_name_ = string(dir_name);
vector<string> files_;
+
+#ifndef WIN32
+//open a directory the POSIX way
+
DIR *dp;
struct dirent *ep;
dp = opendir (dir_name);
@@ -138,24 +144,12 @@ void open_imgs_dir(char* dir_name, std::vector<cv::Mat>& images, std::vector<std
cerr << ("Couldn't open the directory");
return;
}
- for (unsigned int i=0; i<files_.size(); i++) {
- if (files_[i][0] == '.' || !(hasEndingLower(files_[i],"jpg")||hasEndingLower(files_[i],"png"))) {
- continue;
- }
- cv::Mat m_ = cv::imread(string(dir_name) + "/" + files_[i]);
- images_names.push_back(files_[i]);
- images.push_back(m_);
- }
-}
#else
//open a directory the WIN32 way
-void open_imgs_dir(char* dir_name, std::vector<cv::Mat>& images, std::vector<std::string>& images_names, double downscale_factor) {
HANDLE hFind = INVALID_HANDLE_VALUE;
WIN32_FIND_DATA fdata;
- vector<string> files_;
- string dir_name_ = string(dir_name);
if(dir_name_[dir_name_.size()-1] == '\\' || dir_name_[dir_name_.size()-1] == '/') {
dir_name_ = dir_name_.substr(0,dir_name_.size()-1);
}
@@ -193,7 +187,8 @@ void open_imgs_dir(char* dir_name, std::vector<cv::Mat>& images, std::vector<std
FindClose(hFind);
hFind = INVALID_HANDLE_VALUE;
-
+#endif
+
for (unsigned int i=0; i<files_.size(); i++) {
if (files_[i][0] == '.' || !(hasEndingLower(files_[i],"jpg")||hasEndingLower(files_[i],"png"))) {
continue;
@@ -204,5 +199,6 @@ void open_imgs_dir(char* dir_name, std::vector<cv::Mat>& images, std::vector<std
images_names.push_back(files_[i]);
images.push_back(m_);
}
+
+
}
-#endif
View
134 FindCameraMatrices.cpp
@@ -44,14 +44,26 @@ bool CheckCoherentRotation(cv::Mat_<double>& R) {
// cout << "flip right vector" << endl;
// eR.row(0) = -eR.row(0);
//}
+
+ if(fabsf(determinant(R))-1.0 > 1e-09) {
+ cerr << "det(R) != +-1.0, this is not a rotation matrix" << endl;
+ return false;
+ }
+
return true;
}
Mat GetFundamentalMat(const vector<KeyPoint>& imgpts1,
const vector<KeyPoint>& imgpts2,
vector<KeyPoint>& imgpts1_good,
vector<KeyPoint>& imgpts2_good,
- vector<DMatch>& matches) {
+ vector<DMatch>& matches
+#ifdef __SFM__DEBUG__
+ ,const Mat& img_1,
+ const Mat& img_2
+#endif
+ )
+{
//Try to eliminate keypoints based on the fundamental matrix
//(although this is not the proper way to do this)
vector<uchar> status(imgpts1.size());
@@ -91,7 +103,7 @@ Mat GetFundamentalMat(const vector<KeyPoint>& imgpts1,
}
vector<DMatch> new_matches;
- cout << "keeping " << countNonZero(status) << " / " << status.size() << endl;
+ cout << "F keeping " << countNonZero(status) << " / " << status.size() << endl;
for (unsigned int i=0; i<status.size(); i++) {
if (status[i])
{
@@ -127,6 +139,44 @@ Mat GetFundamentalMat(const vector<KeyPoint>& imgpts1,
return F;
}
+void TakeSVDOfE(Mat& E, Mat& svd_u, Mat& svd_vt, Mat& svd_w) {
+#ifndef USE_EIGEN
+ SVD svd(E,SVD::MODIFY_A);
+ svd_u = svd.u;
+ svd_vt = svd.vt;
+ svd_w = svd.w;
+#else
+ cout << "Eigen3 SVD..\n";
+ Eigen::Matrix3f e; e << E(0,0), E(0,1), E(0,2),
+ E(1,0), E(1,1), E(1,2),
+ E(2,0), E(2,1), E(2,2);
+ Eigen::JacobiSVD<Eigen::MatrixXf> svd(e, Eigen::ComputeThinU | Eigen::ComputeThinV);
+ Eigen::MatrixXf Esvd_u = svd.matrixU();
+ Eigen::MatrixXf Esvd_v = svd.matrixV();
+ svd_u = (Mat_<double>(3,3) << Esvd_u(0,0), Esvd_u(0,1), Esvd_u(0,2),
+ Esvd_u(1,0), Esvd_u(1,1), Esvd_u(1,2),
+ Esvd_u(2,0), Esvd_u(2,1), Esvd_u(2,2));
+ Mat_<double> svd_v = (Mat_<double>(3,3) << Esvd_v(0,0), Esvd_v(0,1), Esvd_v(0,2),
+ Esvd_v(1,0), Esvd_v(1,1), Esvd_v(1,2),
+ Esvd_v(2,0), Esvd_v(2,1), Esvd_v(2,2));
+ svd_vt = svd_v.t();
+ svd_w = (Mat_<double>(1,3) << svd.singularValues()[0] , svd.singularValues()[1] , svd.singularValues()[2]);
+#endif
+
+ cout << "----------------------- SVD ------------------------\n";
+ cout << "U:\n"<<svd_u<<"\nW:\n"<<svd_w<<"\nVt:\n"<<svd_vt<<endl;
+ cout << "----------------------------------------------------\n";
+}
+
+bool TestTriangulation(const vector<CloudPoint>& pcloud) {
+ int count = 0;
+ for (int i=0; i<pcloud.size(); i++) {
+ count += pcloud[i].pt.z > 0 ? 1 : 0;
+ }
+ cout << count << "/" << pcloud.size() << " are in front of camera" << endl;
+ return (pcloud.size() - count) < 20; //allow only 20 "outliers"
+}
+
bool FindCameraMatrices(const Mat& K,
const Mat& Kinv,
const vector<KeyPoint>& imgpts1,
@@ -148,39 +198,18 @@ bool FindCameraMatrices(const Mat& K,
cout << "Find camera matrices...";
double t = getTickCount();
- Mat F = GetFundamentalMat(imgpts1,imgpts2,imgpts1_good,imgpts2_good,matches);
+ Mat F = GetFundamentalMat(imgpts1,imgpts2,imgpts1_good,imgpts2_good,matches
+#ifdef __SFM__DEBUG__
+ ,img_1,img_2
+#endif
+ );
//Essential matrix: compute then extract cameras [R|t]
Mat_<double> E = K.t() * F * K; //according to HZ (9.12)
//decompose E to P' , HZ (9.19)
{
-
-#ifndef USE_EIGEN
- SVD svd(E,SVD::MODIFY_A);
- Mat svd_u = svd.u;
- Mat svd_vt = svd.vt;
- Mat svd_w = svd.w;
-#else
- cout << "Eigen3 SVD..\n";
- Eigen::Matrix3f e; e << E(0,0), E(0,1), E(0,2),
- E(1,0), E(1,1), E(1,2),
- E(2,0), E(2,1), E(2,2);
- Eigen::JacobiSVD<Eigen::MatrixXf> svd(e, Eigen::ComputeThinU | Eigen::ComputeThinV);
- Eigen::MatrixXf Esvd_u = svd.matrixU();
- Eigen::MatrixXf Esvd_v = svd.matrixV();
- Mat_<double> svd_u = (Mat_<double>(3,3) << Esvd_u(0,0), Esvd_u(0,1), Esvd_u(0,2),
- Esvd_u(1,0), Esvd_u(1,1), Esvd_u(1,2),
- Esvd_u(2,0), Esvd_u(2,1), Esvd_u(2,2));
- Mat_<double> svd_v = (Mat_<double>(3,3) << Esvd_v(0,0), Esvd_v(0,1), Esvd_v(0,2),
- Esvd_v(1,0), Esvd_v(1,1), Esvd_v(1,2),
- Esvd_v(2,0), Esvd_v(2,1), Esvd_v(2,2));
- Mat svd_vt = svd_v.t();
- Mat_<double> svd_w = (Mat_<double>(1,3) << svd.singularValues()[0] , svd.singularValues()[1] , svd.singularValues()[2]);
-#endif
-
- cout << "----------------------- SVD ------------------------\n";
- cout << "U:\n"<<svd_u<<"\nW:\n"<<svd_w<<"\nVt:\n"<<svd_vt<<endl;
- cout << "----------------------------------------------------\n";
+ Mat svd_u, svd_vt, svd_w;
+ TakeSVDOfE(E,svd_u,svd_vt,svd_w);
//check if first and second singular values are the same (as they should be)
double singular_values_ratio = fabsf(svd_w.at<double>(0) / svd_w.at<double>(1));
@@ -191,6 +220,13 @@ bool FindCameraMatrices(const Mat& K,
return false;
}
+ //according to http://en.wikipedia.org/wiki/Essential_matrix#Properties_of_the_essential_matrix
+ if(fabsf(determinant(E)) > 1e-08) {
+ cout << "det(E) != 0 : " << determinant(E) << "\n";
+ P1 = 0;
+ return false;
+ }
+
Matx33d W(0,-1,0, //HZ 9.13
1,0,0,
0,0,1);
@@ -204,7 +240,16 @@ bool FindCameraMatrices(const Mat& K,
cout << "resulting rotation is not coherent\n";
P1 = 0;
return false;
- }
+ }
+
+ if(determinant(R)+1.0 < 1e-09) {
+ //according to http://en.wikipedia.org/wiki/Essential_matrix#Showing_that_it_is_valid
+ cout << "det(R) == -1 ["<<determinant(R)<<"]: flip E's sign" << endl;
+ E = -E;
+ TakeSVDOfE(E, svd_u, svd_vt, svd_w);
+ R = svd_u * Mat(W) * svd_vt;
+ t = svd_u.col(2);
+ }
P1 = Matx34d(R(0,0), R(0,1), R(0,2), t(0),
R(1,0), R(1,1), R(1,2), t(1),
@@ -213,11 +258,11 @@ bool FindCameraMatrices(const Mat& K,
vector<CloudPoint> pcloud; vector<KeyPoint> corresp;
TriangulatePoints(imgpts1_good, imgpts2_good, Kinv, P, P1, pcloud, corresp);
- Scalar X = mean(CloudPointsToPoints(pcloud));
- cout << "Mean :" << X[0] << "," << X[1] << "," << X[2] << "," << X[3] << endl;
+// Scalar X = mean(CloudPointsToPoints(pcloud));
+// cout << "Mean :" << X[0] << "," << X[1] << "," << X[2] << "," << X[3] << endl;
//check if points are triangulated --in front-- of cameras for all 4 ambiguities
- if (X(2) < 0) {
+ if (!TestTriangulation(pcloud)) {
t = -svd_u.col(2); //-u3
P1 = Matx34d(R(0,0), R(0,1), R(0,2), t(0),
R(1,0), R(1,1), R(1,2), t(1),
@@ -226,10 +271,10 @@ bool FindCameraMatrices(const Mat& K,
pcloud.clear(); corresp.clear();
TriangulatePoints(imgpts1_good, imgpts2_good, Kinv, P, P1, pcloud, corresp);
- X = mean(CloudPointsToPoints(pcloud));
- cout << "Mean :" << X[0] << "," << X[1] << "," << X[2] << "," << X[3] << endl;
+// X = mean(CloudPointsToPoints(pcloud));
+// cout << "Mean :" << X[0] << "," << X[1] << "," << X[2] << "," << X[3] << endl;
- if (X(2) < 0) {
+ if (!TestTriangulation(pcloud)) {
t = svd_u.col(2); //u3
R = svd_u * Mat(Wt) * svd_vt; //UWtVt
P1 = Matx34d(R(0,0), R(0,1), R(0,2), t(0),
@@ -239,10 +284,10 @@ bool FindCameraMatrices(const Mat& K,
pcloud.clear(); corresp.clear();
TriangulatePoints(imgpts1_good, imgpts2_good, Kinv, P, P1, pcloud, corresp);
- X = mean(CloudPointsToPoints(pcloud));
- cout << "Mean :" << X[0] << "," << X[1] << "," << X[2] << "," << X[3] << endl;
+// X = mean(CloudPointsToPoints(pcloud));
+// cout << "Mean :" << X[0] << "," << X[1] << "," << X[2] << "," << X[3] << endl;
- if (X(2) < 0) {
+ if (!TestTriangulation(pcloud)) {
t = -svd_u.col(2);//-u3
P1 = Matx34d(R(0,0), R(0,1), R(0,2), t(0),
R(1,0), R(1,1), R(1,2), t(1),
@@ -251,11 +296,12 @@ bool FindCameraMatrices(const Mat& K,
pcloud.clear(); corresp.clear();
TriangulatePoints(imgpts1_good, imgpts2_good, Kinv, P, P1, pcloud, corresp);
- X = mean(CloudPointsToPoints(pcloud));
- cout << "Mean :" << X[0] << "," << X[1] << "," << X[2] << "," << X[3] << endl;
+// X = mean(CloudPointsToPoints(pcloud));
+// cout << "Mean :" << X[0] << "," << X[1] << "," << X[2] << "," << X[3] << endl;
- if (X(2) < 0) {
- cout << "Shit." << endl; exit(0);
+ if (!TestTriangulation(pcloud)) {
+ cout << "Shit." << endl;
+ return false;
}
}
}
View
10 FindCameraMatrices.h
@@ -13,13 +13,19 @@
#include "Common.h"
-#undef __SFM__DEBUG__
+//#undef __SFM__DEBUG__
+
+bool CheckCoherentRotation(cv::Mat_<double>& R);
cv::Mat GetFundamentalMat(const std::vector<cv::KeyPoint>& imgpts1,
const std::vector<cv::KeyPoint>& imgpts2,
std::vector<cv::KeyPoint>& imgpts1_good,
std::vector<cv::KeyPoint>& imgpts2_good,
- std::vector<cv::DMatch>& matches);
+ std::vector<cv::DMatch>& matches
+#ifdef __SFM__DEBUG__
+ ,const cv::Mat& = cv::Mat(), const cv::Mat& = cv::Mat()
+#endif
+ );
bool FindCameraMatrices(const cv::Mat& K,
const cv::Mat& Kinv,
View
4 GPUSURFFeatureMatcher.cpp
@@ -49,8 +49,8 @@ GPUSURFFeatureMatcher::GPUSURFFeatureMatcher(vector<cv::Mat>& imgs_,
void GPUSURFFeatureMatcher::MatchFeatures(int idx_i, int idx_j, vector<DMatch>* matches) {
#ifdef __SFM__DEBUG__
- const Mat& img_1 = imgs[idx_i];
- const Mat& img_2 = imgs[idx_j];
+ Mat img_1; imgs[idx_i].download(img_1);
+ Mat img_2; imgs[idx_j].download(img_2);
#endif
const vector<KeyPoint>& imgpts1 = imgpts[idx_i];
const vector<KeyPoint>& imgpts2 = imgpts[idx_j];
View
27 MultiCameraDistance.cpp
@@ -50,9 +50,10 @@ imgs_names(imgs_names_),features_matched(false),use_rich_features(true),use_gpu(
} else {
//no calibration matrix file - mockup calibration
cv::Size imgs_size = imgs_[0].size();
- cam_matrix = (cv::Mat_<double>(3,3) << imgs_size.height , 0 , imgs_size.width/2.0,
- 0, imgs_size.height, imgs_size.height/2.0,
- 0, 0, 1);
+ double max_w_h = MAX(imgs_size.height,imgs_size.width);
+ cam_matrix = (cv::Mat_<double>(3,3) << max_w_h , 0 , imgs_size.width/2.0,
+ 0, max_w_h, imgs_size.height/2.0,
+ 0, 0, 1);
distortion_coeff = cv::Mat_<double>::zeros(1,4);
}
@@ -112,13 +113,13 @@ void MultiCameraDistance::OnlyMatchFeatures(int strategy)
features_matched = true;
}
-bool MultiCameraDistance::CheckCoherentRotation(cv::Mat_<double>& R) {
-// std::cout << "R; " << R << std::endl;
- double s = cv::norm(R,cv::Mat_<double>::eye(3,3),cv::NORM_L1);
-// std::cout << "Distance from I: " << s << std::endl;
- if (s > 2.3) { // norm of R from I is large -> probably bad rotation
- std::cout << "rotation is probably not coherent.." << std::endl;
- return false; //skip triangulation
- }
- return true;
-}
+//bool MultiCameraDistance::CheckCoherentRotation(cv::Mat_<double>& R) {
+//// std::cout << "R; " << R << std::endl;
+// double s = cv::norm(R,cv::Mat_<double>::eye(3,3),cv::NORM_L1);
+//// std::cout << "Distance from I: " << s << std::endl;
+// if (s > 2.3) { // norm of R from I is large -> probably bad rotation
+// std::cout << "rotation is probably not coherent.." << std::endl;
+// return false; //skip triangulation
+// }
+// return true;
+//}
View
6 MultiCameraDistance.h
@@ -30,7 +30,7 @@ class MultiCameraDistance : public IDistance {
std::vector<cv::Mat> imgs;
std::vector<std::string> imgs_names;
- std::map<std::pair<int,int> ,cv::Matx34d> Pmats;
+ std::map<int,cv::Matx34d> Pmats;
cv::Mat K;
cv::Mat_<double> Kinv;
@@ -54,7 +54,7 @@ class MultiCameraDistance : public IDistance {
const std::vector<cv::Vec3b>& getPointCloudRGB() { return pointCloudRGB; }
std::vector<cv::Matx34d> getCameras() {
std::vector<cv::Matx34d> v;
- for(std::map<std::pair<int,int> ,cv::Matx34d>::const_iterator it = Pmats.begin(); it != Pmats.end(); ++it ) {
+ for(std::map<int ,cv::Matx34d>::const_iterator it = Pmats.begin(); it != Pmats.end(); ++it ) {
v.push_back( it->second );
}
return v;
@@ -66,5 +66,5 @@ class MultiCameraDistance : public IDistance {
const std::vector<std::string>& imgs_names_,
const std::string& imgs_path_);
virtual void OnlyMatchFeatures(int strategy = STRATEGY_USE_FEATURE_MATCH);
- bool CheckCoherentRotation(cv::Mat_<double>& R);
+// bool CheckCoherentRotation(cv::Mat_<double>& R);
};
View
272 MultiCameraPnP.cpp
@@ -28,44 +28,55 @@ void MultiCameraPnP::GetBaseLineTriangulation() {
//Reconstruct from two views
bool goodF = false;
- second_view = 0; //start with 0th and 1st views
- while (!goodF && second_view < imgpts.size()) {
- //what if reconstruction of first two views is bad? fallback to another pair
- second_view++; //go to the next view...
- //See if the Fundamental Matrix between these two views is good
- goodF = FindCameraMatrices(K, Kinv,
- imgpts[0],
- imgpts[second_view],
- imgpts_good[0],
- imgpts_good[second_view],
- P,
- Pmats[std::make_pair(0,second_view)],
- matches_matrix[std::make_pair(0,second_view)],
- tmp_pcloud
+ first_view = second_view = 0; //start with 0th and 1st views
+ while (!goodF && first_view < (imgpts.size()-1)) {
+ second_view = first_view + 1;
+ while (!goodF && second_view < imgpts.size()) {
+ //what if reconstruction of first two views is bad? fallback to another pair
+ //See if the Fundamental Matrix between these two views is good
+ goodF = FindCameraMatrices(K, Kinv,
+ imgpts[first_view],
+ imgpts[second_view],
+ imgpts_good[first_view],
+ imgpts_good[second_view],
+ P,
+ Pmats[second_view],
+ matches_matrix[std::make_pair(first_view,second_view)],
+ tmp_pcloud
#ifdef __SFM__DEBUG__
- ,imgs[0],imgs[second_view]
+ ,imgs[first_view],imgs[second_view]
#endif
- );
-
+ );
+ if (!goodF) {
+ second_view++; //go to the next view...
+ }
+ }
+ if (!goodF) {
+ first_view++;
+ }
+ }
+ Pmats[first_view] = P;
+
+ if (!goodF) {
+ cerr << "Cannot find a good pair of images to obtain a baseline triangulation" << endl;
+ exit(0);
}
- //TODO: if the P1 matrix is far away from identity rotation - the solution is probably invalid...
- //so use an identity matrix?
-
+ cout << "Taking baseline from " << imgs_names[first_view] << " and " << imgs_names[second_view] << endl;
double reproj_error;
{
std::vector<cv::KeyPoint> pt_set1,pt_set2;
- std::vector<cv::DMatch> matches = matches_matrix[std::make_pair(0,second_view)];
+ std::vector<cv::DMatch> matches = matches_matrix[std::make_pair(first_view,second_view)];
- GetAlignedPointsFromMatch(imgpts[0],imgpts[second_view],matches,pt_set1,pt_set2);
+ GetAlignedPointsFromMatch(imgpts[first_view],imgpts[second_view],matches,pt_set1,pt_set2);
reproj_error = TriangulatePoints(pt_set1,
pt_set2,
Kinv,
P,
- Pmats[std::make_pair(0,second_view)],
+ Pmats[second_view],
pcloud,
correspImg1Pt);
@@ -78,7 +89,7 @@ void MultiCameraPnP::GetBaseLineTriangulation() {
for (unsigned int i=0; i<pcloud.size(); i++) {
pcloud[i].imgpt_for_img = std::vector<int>(imgs.size(),-1);
//matches[i] corresponds to pointcloud[i]
- pcloud[i].imgpt_for_img[0] = matches[i].queryIdx;
+ pcloud[i].imgpt_for_img[first_view] = matches[i].queryIdx;
pcloud[i].imgpt_for_img[second_view] = matches[i].trainIdx;
}
}
@@ -96,112 +107,123 @@ void MultiCameraPnP::RecoverDepthFromImages() {
cv::Matx34d P(1,0,0,0,
0,1,0,0,
0,0,1,0);
- Pmats[std::make_pair(0,0)] = P;
GetBaseLineTriangulation();
- cv::Matx34d P1 = Pmats[std::make_pair(0,second_view)];
+ cv::Matx34d P1 = Pmats[second_view];
cv::Mat_<double> distcoeff(1,4,0.0);
cv::Mat K_32f; K.convertTo(K_32f,CV_32FC1);
cv::Mat distcoeff_32f; distcoeff.convertTo(distcoeff_32f,CV_32FC1);
cv::Mat_<double> t = (cv::Mat_<double>(1,3) << P1(0,3), P1(1,3), P1(2,3));
cv::Mat_<double> R = (cv::Mat_<double>(3,3) << P1(0,0), P1(0,1), P1(0,2),
- P1(1,0), P1(1,1), P1(1,2),
- P1(2,0), P1(2,1), P1(2,2));
+ P1(1,0), P1(1,1), P1(1,2),
+ P1(2,0), P1(2,1), P1(2,2));
cv::Mat_<double> rvec(1,3); Rodrigues(R, rvec);
//loop images to incrementally recover more cameras
- for (unsigned int i=1; i < imgs.size(); i++) {
- if(i==second_view) continue; //baseline is already in cloud...
+ for (unsigned int i=0; i < imgs.size(); i++) {
+ if(i==second_view || i==first_view) continue; //baseline is already in cloud...
- // start scanning previous views
- int view = i-1;
- for (; view >= 0; view--)
- {
- std::cout << "-------------------------- " << imgs_names[view] << " -> " << imgs_names[i] << " --------------------------\n";
+ std::cout << "-------------------------- " << imgs_names[i] << " --------------------------\n";
- std::vector<cv::Point3f> ppcloud;
- std::vector<cv::Point2f> imgPoints;
-
- //prune the match between i and <view> using the Fundamental matrix to prune
- GetFundamentalMat( imgpts[view],
- imgpts[i],
- imgpts_good[view],
- imgpts_good[i],
- matches_matrix[std::make_pair(view,i)]
- );
-
- {
- //check for matches between i'th frame and <view>'th frame (and thus the current cloud)
- std::vector<cv::DMatch> matches = matches_matrix[std::make_pair(view,i)];
- for (unsigned int pt_img_view=0; pt_img_view<matches.size(); pt_img_view++) {
- // the index of the matching point in <view>
- int matches_img0_queryIdx = matches[pt_img_view].queryIdx;
+ std::vector<cv::Point3f> ppcloud;
+ std::vector<cv::Point2f> imgPoints;
- //scan the existing cloud (pcloud) to see if this point from <view> exists
- for (unsigned int pcldp=0; pcldp<pcloud.size(); pcldp++) {
- // see if corresponding point was found in cloud
- if (matches_img0_queryIdx == pcloud[pcldp].imgpt_for_img[view]) {
- //3d point in cloud
- ppcloud.push_back(pcloud[pcldp].pt);
- //2d point in image i
- imgPoints.push_back(imgpts[i][matches[pt_img_view].trainIdx].pt);
-
- break;
- }
+ vector<int> pcloud_status(pcloud.size(),0);
+ for (int old_view = i-1; old_view >= 0; old_view--)
+ {
+ //check for matches between i'th frame and <old_view>'th frame (and thus the current cloud)
+ std::vector<cv::DMatch> matches = matches_matrix[std::make_pair(old_view,i)];
+ for (unsigned int pt_img_view=0; pt_img_view<matches.size(); pt_img_view++) {
+ // the index of the matching point in <view>
+ int matches_img0_queryIdx = matches[pt_img_view].queryIdx;
+
+ //scan the existing cloud (pcloud) to see if this point from <old_view> exists
+ for (unsigned int pcldp=0; pcldp<pcloud.size(); pcldp++) {
+ // see if corresponding point was found in cloud
+ if (matches_img0_queryIdx == pcloud[pcldp].imgpt_for_img[old_view]
+ &&
+ pcloud_status[pcldp] == 0) //prevent duplicates
+ {
+ //3d point in cloud
+ ppcloud.push_back(pcloud[pcldp].pt);
+ //2d point in image i
+ imgPoints.push_back(imgpts[i][matches[pt_img_view].trainIdx].pt);
+
+ pcloud_status[pcldp] = 1;
+ break;
}
}
}
+ }
+ cout << "found " << ppcloud.size() << " 3d-2d point correspondences"<<endl;
- if(ppcloud.size() <= 7 || imgPoints.size() <= 7 || ppcloud.size() != imgPoints.size()) {
- //something went wrong aligning 3D to 2D points..
- cerr << "couldn't find [enough] corresponding cloud points... (only " << ppcloud.size() << ")" <<endl;
- continue;
- }
-
- if(!use_gpu)
- cv::solvePnPRansac(ppcloud, imgPoints, K, distcoeff, rvec, t, false);
-// cv::solvePnP(ppcloud, imgPoints, K, distcoeff, rvec, t, false, CV_EPNP);
- else {
- //use GPU ransac
- //make sure data structures are cv::gpu compatible
- cv::Mat ppcloud_m(ppcloud); ppcloud_m = ppcloud_m.t();
- cv::Mat imgPoints_m(imgPoints); imgPoints_m = imgPoints_m.t();
- cv::Mat rvec_,t_;
+ if(ppcloud.size() <= 7 || imgPoints.size() <= 7 || ppcloud.size() != imgPoints.size()) {
+ //something went wrong aligning 3D to 2D points..
+ cerr << "couldn't find [enough] corresponding cloud points... (only " << ppcloud.size() << ")" <<endl;
+ continue;
+ }
+
+ if(!use_gpu)
+ cv::solvePnPRansac(ppcloud, imgPoints, K, distcoeff, rvec, t, true);
+// cv::solvePnP(ppcloud, imgPoints, K, distcoeff, rvec, t, false, CV_EPNP);
+ else {
+ //use GPU ransac
+ //make sure data structures are cv::gpu compatible
+ cv::Mat ppcloud_m(ppcloud); ppcloud_m = ppcloud_m.t();
+ cv::Mat imgPoints_m(imgPoints); imgPoints_m = imgPoints_m.t();
+ cv::Mat rvec_,t_;
- cv::gpu::solvePnPRansac(ppcloud_m,imgPoints_m,K_32f,distcoeff_32f,rvec_,t_,false);
-
- rvec_.convertTo(rvec,CV_64FC1);
- t_.convertTo(t,CV_64FC1);
- }
+ cv::gpu::solvePnPRansac(ppcloud_m,imgPoints_m,K_32f,distcoeff_32f,rvec_,t_,false);
- if(cv::norm(t) > 200.0) {
- // this is bad...
- cerr << "estimated camera movement is too big, skip this camera\r\n";
- continue;
- }
+ rvec_.convertTo(rvec,CV_64FC1);
+ t_.convertTo(t,CV_64FC1);
+ }
+
+ if(cv::norm(t) > 200.0) {
+ // this is bad...
+ cerr << "estimated camera movement is too big, skip this camera\r\n";
+ continue;
+ }
- Rodrigues(rvec, R);
- //if(!CheckCoherentRotation(R)) {
- // cerr << "rotation is incoherent. we should try a different base view..." << endl;
- // continue;
- //}
-
- std::cout << "found t = " << t << "\nR = \n"<<R<<std::endl;
-
- P1 = cv::Matx34d(R(0,0),R(0,1),R(0,2),t(0),
- R(1,0),R(1,1),R(1,2),t(1),
- R(2,0),R(2,1),R(2,2),t(2));
+ Rodrigues(rvec, R);
+ if(!CheckCoherentRotation(R)) {
+ cerr << "rotation is incoherent. we should try a different base view..." << endl;
+ continue;
+ }
+
+ std::cout << "found t = " << t << "\nR = \n"<<R<<std::endl;
+
+ P1 = cv::Matx34d(R(0,0),R(0,1),R(0,2),t(0),
+ R(1,0),R(1,1),R(1,2),t(1),
+ R(2,0),R(2,1),R(2,2),t(2));
+
+ //since we are using PnP , all P cam-pose matrices are aligned to the origin 0,0,0 camera...
+ Pmats[i] = P1;
- //since we are using PnP , all P cam-pose matrices are aligned to the origin 0,0,0 camera...
- Pmats[std::make_pair(0,i)] = P1;
+ // start triangulating with previous views
+ for (int view = i-1; view >= 0; view--)
+ {
+ cout << " -> " << imgs_names[view] << endl;
+
//get the left camera matrix
//TODO: potential bug - the P mat for <view> may not exist? or does it...
- P = Pmats[std::make_pair(0,view)];
-
+ P = Pmats[view];
+
+ //prune the match between i and <view> using the Fundamental matrix to prune
+ GetFundamentalMat( imgpts[view],
+ imgpts[i],
+ imgpts_good[view],
+ imgpts_good[i],
+ matches_matrix[std::make_pair(view,i)]
+#ifdef __SFM__DEBUG__
+ ,imgs_orig[view], imgs_orig[i]
+#endif
+ );
+
std::vector<cv::KeyPoint> pt_set1,pt_set2;
std::vector<cv::DMatch> matches = matches_matrix[std::make_pair(view,i)];
GetAlignedPointsFromMatch(imgpts[view],imgpts[i],matches,pt_set1,pt_set2);
@@ -223,7 +245,9 @@ void MultiCameraPnP::RecoverDepthFromImages() {
vector<int> add_to_cloud(new_triangulated.size());
int found_other_views_count = 0;
-#pragma omp parallel for
+
+ //scan new triangulated points, if they were already triangulated before - strengthen cloud
+//#pragma omp parallel for num_threads(1)
for (int j = 0; j<matches.size(); j++) {
new_triangulated[j].imgpt_for_img = std::vector<int>(imgs.size(),-1);
@@ -233,28 +257,28 @@ void MultiCameraPnP::RecoverDepthFromImages() {
new_triangulated[j].imgpt_for_img[view] = matches[j].queryIdx; //2D reference to <view>
new_triangulated[j].imgpt_for_img[i] = matches[j].trainIdx; //2D reference to <i>
bool found_in_other_view = false;
- for (unsigned int view_ = 1; view_ < i; view_++) {
- if(view_ == view) continue;
+ for (unsigned int view_ = 0; view_ < i; view_++) {
+ if(view_ != view) {
+ //Look for points in <view_> that match to points in <i>
+ std::vector<cv::DMatch> submatches = matches_matrix[std::make_pair(view_,i)];
+ for (unsigned int ii = 0; ii < submatches.size(); ii++) {
+ if (submatches[ii].trainIdx == matches[j].trainIdx &&
+ !found_in_other_view)
+ {
+ //Point was already found in <view_> - strengthen it in the known cloud, if it exists there
- //Look for points in <view_> that match to points in <i>
- std::vector<cv::DMatch> submatches = matches_matrix[std::make_pair(view_,i)];
- for (unsigned int ii = 0; ii < submatches.size(); ii++) {
- if (submatches[ii].trainIdx == matches[j].trainIdx &&
- !found_in_other_view)
- {
- //Point was already found in <view_> - strengthen it in the known cloud, if it exists there
-
- //cout << "2d pt " << submatches[ii].queryIdx << " in img " << view_ << " matched 2d pt " << submatches[ii].trainIdx << " in img " << i << endl;
- for (unsigned int pt3d=0; pt3d<pcloud.size(); pt3d++) {
- if (pcloud[pt3d].imgpt_for_img[view_] == submatches[ii].queryIdx)
- {
- //cout << "3d point "<<pt3d<<" in cloud, referenced 2d pt " << submatches[ii].queryIdx << " in view " << view_ << endl;
-#pragma omp critical
+ //cout << "2d pt " << submatches[ii].queryIdx << " in img " << view_ << " matched 2d pt " << submatches[ii].trainIdx << " in img " << i << endl;
+ for (unsigned int pt3d=0; pt3d<pcloud.size(); pt3d++) {
+ if (pcloud[pt3d].imgpt_for_img[view_] == submatches[ii].queryIdx)
{
- pcloud[pt3d].imgpt_for_img[i] = matches[j].trainIdx;
- pcloud[pt3d].imgpt_for_img[view] = matches[j].queryIdx;
- found_in_other_view = true;
- add_to_cloud[j] = 0;
+ //cout << "3d point "<<pt3d<<" in cloud, referenced 2d pt " << submatches[ii].queryIdx << " in view " << view_ << endl;
+#pragma omp critical
+ {
+ pcloud[pt3d].imgpt_for_img[i] = matches[j].trainIdx;
+ pcloud[pt3d].imgpt_for_img[view] = matches[j].queryIdx;
+ found_in_other_view = true;
+ add_to_cloud[j] = 0;
+ }
}
}
}
View
1 MultiCameraPnP.h
@@ -34,6 +34,7 @@ class MultiCameraPnP : public MultiCameraDistance {
private:
void GetBaseLineTriangulation();
+ int first_view;
int second_view; //baseline's second view other to 0
std::vector<CloudPoint> pcloud;
};
View
6 OFFeatureMatcher.cpp
@@ -27,8 +27,8 @@ OFFeatureMatcher::OFFeatureMatcher(std::vector<cv::Mat>& imgs_,
imgpts(imgpts_), imgs(imgs_)
{
//detect keypoints for all images
- //FastFeatureDetector ffd;
- DenseFeatureDetector ffd;
+ FastFeatureDetector ffd;
+// DenseFeatureDetector ffd;
ffd.detect(imgs, imgpts);
}
@@ -80,7 +80,7 @@ void OFFeatureMatcher::MatchFeatures(int idx_i, int idx_j, vector<DMatch>* match
//
//}
-#if 1
+#if 0
#ifdef __SFM__DEBUG__
{
// draw flow field
View
9 Visualization.cpp
@@ -43,7 +43,7 @@ pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud,cloud1,cloud_no_floor,orig_cloud;
////////////////////////////////// Show Camera ////////////////////////////////////
std::deque<pcl::PolygonMesh> cam_meshes;
std::deque<std::vector<Matrix<float,6,1> > > linesToShow;
-HANDLE hIOMutexCam;
+//TODO define mutex
bool bShowCam;
int iCamCounter = 0;
int iLineCounter = 0;
@@ -54,7 +54,7 @@ inline pcl::PointXYZRGB Eigen2PointXYZRGB(Eigen::Vector3f v, Eigen::Vector3f rgb
inline pcl::PointNormal Eigen2PointNormal(Eigen::Vector3f v, Eigen::Vector3f n) { pcl::PointNormal p; p.x=v[0];p.y=v[1];p.z=v[2];p.normal_x=n[0];p.normal_y=n[1];p.normal_z=n[2]; return p;}
inline float* Eigen2float6(Eigen::Vector3f v, Eigen::Vector3f rgb) { static float buf[6]; buf[0]=v[0];buf[1]=v[1];buf[2]=v[2];buf[3]=rgb[0];buf[4]=rgb[1];buf[5]=rgb[2]; return buf; }
inline Matrix<float,6,1> Eigen2Eigen(Vector3f v, Vector3f rgb) { return (Matrix<float,6,1>() << v[0],v[1],v[2],rgb[0],rgb[1],rgb[2]).finished(); }
-inline std::vector<Matrix<float,6,1>> AsVector(const Matrix<float,6,1>& p1, const Matrix<float,6,1>& p2) { std::vector<Matrix<float,6,1>> v(2); v[0] = p1; v[1] = p2; return v; }
+inline std::vector<Matrix<float,6,1> > AsVector(const Matrix<float,6,1>& p1, const Matrix<float,6,1>& p2) { std::vector<Matrix<float,6,1> > v(2); v[0] = p1; v[1] = p2; return v; }
void visualizerShowCamera(const Matrix3f& R, const Vector3f& t, float r, float g, float b, double s = 0.01 /*downscale factor*/) {
@@ -71,8 +71,7 @@ void visualizerShowCamera(const Matrix3f& R, const Vector3f& t, float r, float g
mesh_cld.push_back(Eigen2PointXYZRGB(t + vforward - vright/2.0 + vup/2.0,rgb));
mesh_cld.push_back(Eigen2PointXYZRGB(t + vforward - vright/2.0 - vup/2.0,rgb));
-
- WaitForSingleObject( hIOMutexCam, INFINITE );
+ //TODO Mutex acquire
pcl::PolygonMesh pm;
pm.polygons.resize(6);
for(int i=0;i<6;i++)
@@ -81,7 +80,7 @@ void visualizerShowCamera(const Matrix3f& R, const Vector3f& t, float r, float g
pcl::toROSMsg(mesh_cld,pm.cloud);
bShowCam = true;
cam_meshes.push_back(pm);
- ReleaseMutex( hIOMutexCam );
+ //TODO mutex release
linesToShow.push_back(
AsVector(Eigen2Eigen(t,rgb),Eigen2Eigen(t + vforward*3.0,rgb))
View
7 main.cpp
@@ -77,18 +77,21 @@ int main(int argc, char** argv) {
{
vector<cv::Point3d> cld = distance->getPointCloud();
cv::Mat_<double> cldm(cld.size(),3);
- for(int i=0;i<cld.size();i++) {
+ for(unsigned int i=0;i<cld.size();i++) {
cldm.row(i)(0) = cld[i].x;
cldm.row(i)(1) = cld[i].y;
cldm.row(i)(2) = cld[i].z;
}
cv::Mat_<double> mean;
cv::PCA pca(cldm,mean,CV_PCA_DATA_AS_ROW);
scale_cameras_down = pca.eigenvalues.at<double>(0);
+ if (scale_cameras_down > 1.0) {
+ scale_cameras_down = 1.0/scale_cameras_down;
+ }
}
vector<cv::Matx34d> v = distance->getCameras();
- for(int i=0;i<v.size();i++) {
+ for(unsigned int i=0;i<v.size();i++) {
cv::Matx33f R;
R(0,0)=v[i](0,0); R(0,1)=v[i](0,1); R(0,2)=v[i](0,2);
R(1,0)=v[i](1,0); R(1,1)=v[i](1,1); R(1,2)=v[i](1,2);

0 comments on commit 1db33c8

Please sign in to comment.
Something went wrong with that request. Please try again.