Skip to content

Commit

Permalink
Improvements to multi-image reconstruction in progress.
Browse files Browse the repository at this point in the history
  • Loading branch information
phg1024 committed Feb 5, 2016
1 parent 3442159 commit fb9a60e
Show file tree
Hide file tree
Showing 2 changed files with 75 additions and 19 deletions.
27 changes: 24 additions & 3 deletions multiimagereconstructor.h
Original file line number Diff line number Diff line change
Expand Up @@ -111,8 +111,6 @@ class MultiImageReconstructor {

template <typename Constraint>
bool MultiImageReconstructor<Constraint>::Reconstruct() {
// TODO Work on this function.

// Initialize the parameter sets
param_sets.resize(image_points_pairs.size());
for(size_t i=0;i<param_sets.size();++i) {
Expand Down Expand Up @@ -147,7 +145,12 @@ bool MultiImageReconstructor<Constraint>::Reconstruct() {
// 3. Convergence test. If not converged, goto step 1.
const int max_iters_main_loop = 3;
int iters_main_loop = 0;

vector<MatrixXd> identity_weights_history;
vector<VectorXd> identity_weights_centroid_history;

while(iters_main_loop++ < 3){

// Single image reconstruction step
for(size_t i=0;i<num_images;++i) {
single_recon.SetMesh(param_sets[i].mesh);
Expand Down Expand Up @@ -193,6 +196,8 @@ bool MultiImageReconstructor<Constraint>::Reconstruct() {
identity_weights.col(i) = param_sets[i].model.Wid;
}

identity_weights_history.push_back(identity_weights);

// Remove outliers
vector<int> consistent_set = StatsUtils::FindConsistentSet(identity_weights, 0.5);
assert(consistent_set.size() > 0);
Expand Down Expand Up @@ -316,19 +321,35 @@ bool MultiImageReconstructor<Constraint>::Reconstruct() {

// Update the identity weights
for(auto& param : param_sets) {
param.model.Wid = identity_centroid;
param.model.Wid = params;

// Also update geometry if needed
{
model.ApplyWeights(param.model.Wid, param.model.Wexp);
param.mesh.UpdateVertices(model.GetTM());
param.mesh.ComputeNormals();
}
}

identity_weights_centroid_history.push_back(params);
}
}
} // end of main reconstruction loop

// Output the reconstructed identity weights
{
for(int i=0;i<identity_weights_history.size();++i) {
ofstream fout("identity_matrix" + std::to_string(i) + ".txt");
fout << identity_weights_history[i];
fout.close();
}

for(int i=0;i<identity_weights_centroid_history.size();++i) {
ofstream fout("identity_centroid" + std::to_string(i) + ".txt");
fout << identity_weights_centroid_history[i];
fout.close();
}
}

// Visualize the final reconstruction results
for(size_t i=0;i<num_images;++i) {
Expand Down
67 changes: 51 additions & 16 deletions statsutils.h
Original file line number Diff line number Diff line change
Expand Up @@ -63,26 +63,61 @@ static MatrixXd normalize(const MatrixXd& mat) {
return normalized_mat;
}

static vector<int> FindConsistentSet(const MatrixXd& identity_weights,
double radius) {
#if 1
// Compute Pearson's correlation among identity weights
MatrixXd metric_mat = StatsUtils::corr(identity_weights);
#else
// Compute normalized Eucledian distance among identity weights
MatrixXd metric_mat = MatrixXd::Ones(num_images, num_images) -
StatsUtils::normalize(StatsUtils::dist(identity_weights.transpose()));
#endif
// Finds a consistent subset of the input samples via mean shift clustering.
//
// Each column of x is one sample (e.g. one identity-weight vector). The
// samples are shifted with a Gaussian kernel of bandwidth h until they
// converge onto their density modes; the centroid of the densest mode is
// then used to select the k samples closest to it.
//
// @param x            ndims-by-nsamples matrix, one sample per column.
// @param h            kernel bandwidth for the mean shift updates.
// @param k            number of nearest samples to keep. If k <= 0 or
//                     k >= nsamples, all sample indices are returned
//                     (backward-compatible "use the whole input set").
// @param centroid_out optional output; receives the densest-mode centroid.
// @return             indices (into the columns of x) of the consistent set.
static vector<int> FindConsistentSet(const MatrixXd& x, double h, int k,
                                     VectorXd* centroid_out=nullptr) {
  // Mean shift until converged.
  const int ndims = x.rows(), nsamples = x.cols();
  MatrixXd m(ndims, nsamples);  // shifted positions produced each iteration
  MatrixXd y = x;               // current positions of the shifted samples
  const double th = 1e-6;       // convergence threshold on the max shift
  const int max_iters = 100;
  bool done = false;

  int iters = 0;
  while(!done && iters < max_iters) {
    for(int i=0;i<nsamples;++i) {
      double gsum = 0;
      // BUGFIX: Eigen spells this VectorXd::Zero, not VectorXd::Zeros.
      VectorXd yi = VectorXd::Zero(ndims);
      for(int j=0;j<nsamples;++j) {
        if(j==i) continue;
        VectorXd dj = (y.col(i) - x.col(j))/h;
        double gj = std::exp(-dj.squaredNorm());
        gsum += gj;
        yi += gj * x.col(j);
      }
      // Small epsilon guards against a zero kernel sum for isolated points.
      m.col(i) = yi / (gsum + 1e-8);
    }

    // BUGFIX: convergence is measured by the largest displacement of any
    // sample in this iteration, not by the largest coordinate value of m.
    const double ms = (m - y).colwise().norm().maxCoeff();

    if(ms < th) {
      cout << "ms = " << ms << endl;
      done = true;
    } else {
      y = m;
      ++iters;
      cout << "iteration " << iters << ": " << ms << endl;
    }
  }

  // Kernel density estimate at each converged position; the sample that
  // ends up closest to the most other samples has the highest density.
  vector<double> d(nsamples, 0);
  for(int i=0;i<nsamples;++i) {
    for(int j=0;j<nsamples;++j) {
      d[i] += std::exp(-(m.col(i) - m.col(j)).squaredNorm());
    }
  }

  // Centroid of the densest mode = converged position of the densest sample.
  const int densest =
      static_cast<int>(std::distance(d.begin(),
                                     std::max_element(d.begin(), d.end())));
  const VectorXd centroid = m.col(densest);
  if(centroid_out != nullptr) {
    // BUGFIX: the centroid was never written to the output before.
    *centroid_out = centroid;
  }

  // BUGFIX: the old code referenced identity_weights (a parameter from the
  // removed signature) and ignored k entirely, returning every index.
  // Keep "return everything" as the fallback when k is out of range so
  // existing callers that relied on the full set still get it.
  vector<int> consistent_set;
  if(k <= 0 || k >= nsamples) {
    for(int i=0;i<nsamples;++i) consistent_set.push_back(i);
    return consistent_set;
  }

  // Pick the k samples whose original positions are nearest the centroid.
  vector<int> order(nsamples);
  std::iota(order.begin(), order.end(), 0);
  std::sort(order.begin(), order.end(),
            [&](int a, int b) {
              return (x.col(a) - centroid).squaredNorm() <
                     (x.col(b) - centroid).squaredNorm();
            });
  consistent_set.assign(order.begin(), order.begin() + k);
  return consistent_set;
}
Expand Down

0 comments on commit fb9a60e

Please sign in to comment.