Rewrite OFF import (#4960)
mmuman committed Feb 16, 2024
1 parent dd2da9e commit ded24c5
Showing 1 changed file with 229 additions and 17 deletions: src/io/import_off.cc.

The 17 deleted lines were the previous CGAL-backed implementation, which (behind #ifdef ENABLE_CGAL) streamed the file straight into a CGAL_Polyhedron and converted it with CGALUtils::createPolySetFromPolyhedron, falling back to a warning when CGAL was disabled. This commit replaces it with a self-contained ASCII OFF parser; the resulting hunk (@@ -2,24 +2,236 @@) reads:
#include "PolySet.h"
#include "printutils.h"
#include "AST.h"
#include <fstream>
#include <boost/regex.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/algorithm/string.hpp>
#include <boost/algorithm/string/split.hpp>

// References:
// http://www.geomview.org/docs/html/OFF.html
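//
// A minimal ASCII OFF file, for reference (a tetrahedron; the counts line
// gives vertices, faces and edges, then one vertex per line, then one face
// per line as "<n> <v0> ... <vn-1>"):
//
//   OFF
//   4 4 6
//   0 0 0
//   1 0 0
//   0 1 0
//   0 0 1
//   3 0 2 1
//   3 0 1 3
//   3 0 3 2
//   3 1 2 3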

std::unique_ptr<PolySet> import_off(const std::string& filename, const Location& loc)
{
boost::regex ex_magic(R"(^(ST)?(C)?(N)?(4)?(n)?OFF( BINARY)? *)");
// XXX: are ST C N always in order?
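// Per the geomview docs the optional prefixes appear in exactly this order
// ([ST][C][N][4][n]OFF), so e.g. "OFF", "NOFF", "CNOFF", "ST4OFF", "nOFF"
// and "OFF BINARY" all match; capture groups 1-6 are ST, C, N, 4, n, BINARY.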
boost::regex ex_cr(R"(\r$)");
boost::regex ex_comment(R"(\s*#.*$)");
boost::smatch results;

std::ifstream f(filename.c_str(), std::ios::in | std::ios::binary);

int lineno = 0;
std::string line;

auto AsciiError = [&](const auto& errstr){
LOG(message_group::Error, loc, "",
"OFF File line %1$s, %2$s line '%3$s' importing file '%4$s'",
lineno, errstr, line, filename);
};

auto getline_clean = [&](const auto& errstr){
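// Fetch the next line that is non-empty after stripping DOS line endings,
// '#' comments and surrounding whitespace; log errstr and return false if
// the end of the file is reached first.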
do {
lineno++;
std::getline(f, line);
if (line.empty() && f.eof()) {
AsciiError(errstr);
return false;
}
// strip DOS line endings
if (boost::regex_search(line, results, ex_cr)) {
line = line.erase(results.position(), results[0].length());
}
// strip comments
if (boost::regex_search(line, results, ex_comment)) {
line = line.erase(results.position(), results[0].length());
}
boost::trim(line);
} while (line.empty());

return true;
};

if (!f.good()) {
AsciiError("File error");
return std::make_unique<PolySet>(3);
}

bool got_magic = false;
// defaults
bool has_normals = false;
bool has_color = false;
bool has_textures = false;
bool has_ndim = false;
bool is_binary = false;
unsigned int dimension = 3;

if (line.empty() && !getline_clean("bad header: end of file")) {
return std::make_unique<PolySet>(3);
}

if (boost::regex_search(line, results, ex_magic)) {
got_magic = true;
// Remove the matched prefix; numbers may follow on the same line.
line = line.erase(0, results[0].length());
has_normals = results[3].matched;
has_color = results[2].matched;
has_textures = results[1].matched;
is_binary = results[6].matched;
if (results[4].matched)
dimension = 4;
has_ndim = results[5].matched;
}

// TODO: handle binary format
if (is_binary) {
AsciiError("binary OFF format not supported");
return std::make_unique<PolySet>(3);
}

std::vector<std::string> words;

if (has_ndim) {
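// nOFF supplies the vertex dimension (Ndim) as the next token; a leading 4
// (4nOFF) adds one homogeneous coordinate on top of it, which is what the
// `+ dimension - 3` below accounts for.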
if (line.empty() && !getline_clean("bad header: end of file")) {
return std::make_unique<PolySet>(3);
}
boost::split(words, line, boost::is_any_of(" \t"), boost::token_compress_on);
if (f.eof() || words.size() < 1) {
AsciiError("bad header: missing Ndim");
return std::make_unique<PolySet>(3);
}
line = line.erase(0, words[0].length() + ((words.size() > 1) ? 1 : 0));
try {
dimension = boost::lexical_cast<unsigned int>(words[0]) + dimension - 3;
} catch (const boost::bad_lexical_cast& blc) {
AsciiError("bad header: bad data for Ndim");
return std::make_unique<PolySet>(3);
}
}

PRINTDB("Header flags: N:%d C:%d ST:%d Ndim:%d B:%d", has_normals % has_color % has_textures % dimension % is_binary);

if (dimension != 3) {
AsciiError((boost::format("unhandled vertex dimensions (%d)") % dimension).str().c_str());
return std::make_unique<PolySet>(3);
}

if (line.empty() && !getline_clean("bad header: end of file")) {
return std::make_unique<PolySet>(3);
}

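// Counts line: NVertices NFaces NEdges. The edge count is not needed to
// reconstruct the mesh, so it is parsed but ignored below.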
boost::split(words, line, boost::is_any_of(" \t"), boost::token_compress_on);
if (f.eof() || words.size() < 3) {
AsciiError("bad header: missing data");
return std::make_unique<PolySet>(3);
}

unsigned long vertices_count;
unsigned long faces_count;
unsigned long edges_count;
unsigned long vertex = 0;
unsigned long face = 0;
try {
vertices_count = boost::lexical_cast<unsigned long>(words[0]);
faces_count = boost::lexical_cast<unsigned long>(words[1]);
edges_count = boost::lexical_cast<unsigned long>(words[2]);
(void)edges_count; // ignored
} catch (const boost::bad_lexical_cast& blc) {
AsciiError("bad header: bad data");
return std::make_unique<PolySet>(3);
}

if (f.eof() || vertices_count < 1 || faces_count < 1) {
AsciiError("bad header: not enough data");
return std::make_unique<PolySet>(3);
}

PRINTDB("%d vertices, %d faces, %d edges.", vertices_count % faces_count % edges_count);

auto ps = std::make_unique<PolySet>(3);
ps->vertices.reserve(vertices_count);
ps->indices.reserve(faces_count);

while ((!f.eof()) && (vertex++ < vertices_count)) {
if (!getline_clean("reading vertices: end of file")) {
return std::make_unique<PolySet>(3);
}

boost::split(words, line, boost::is_any_of(" \t"), boost::token_compress_on);
if (words.size() < 3) {
AsciiError("can't parse vertex: not enough data");
return std::make_unique<PolySet>(3);
}

try {
Vector3d v = {0, 0, 0};
unsigned int i;
for (i = 0; i < dimension; i++) {
v[i] = boost::lexical_cast<double>(words[i]);
}
//PRINTDB("Vertex[%ld] = { %f, %f, %f }", vertex % v[0] % v[1] % v[2]);
if (has_normals) {
; // TODO words[i++]
i += 0;
}
if (has_color) {
; // TODO: Meshlab appends color there, probably to allow gradients
i += 3; // 4?
}
if (has_textures) {
; // TODO words[i++]
}
ps->vertices.push_back(v);
} catch (const boost::bad_lexical_cast& blc) {
AsciiError("can't parse vertex: bad data");
return std::make_unique<PolySet>(3);
}
}

while (!f.eof() && (face++ < faces_count)) {
if (!getline_clean("reading faces: end of file")) {
return std::make_unique<PolySet>(3);
}

boost::split(words, line, boost::is_any_of(" \t"), boost::token_compress_on);
if (words.size() < 1) {
AsciiError("can't parse face: not enough data");
return std::make_unique<PolySet>(3);
}

try {
unsigned long face_size = boost::lexical_cast<unsigned long>(words[0]);
unsigned long i;
if (words.size() - 1 < face_size) {
AsciiError("can't parse face: missing indices");
return std::make_unique<PolySet>(3);
}
ps->indices.emplace_back().reserve(face_size);
//PRINTDB("Index[%d] [%d] = { ", face % n);
for (i = 0; i < face_size; i++) {
int ind = boost::lexical_cast<int>(words[i + 1]);
//PRINTDB("%d, ", ind);
if (ind >= 0 && ind < vertices_count) {
ps->indices.back().push_back(ind);
} else {
AsciiError((boost::format("ignored bad face vertex index: %d") % ind).str().c_str());
}
}
//PRINTD("}");
if (words.size() >= face_size + 4) {
// TODO: handle optional color info
/*
int r=boost::lexical_cast<int>(words[i++]);
int g=boost::lexical_cast<int>(words[i++]);
int b=boost::lexical_cast<int>(words[i++]);
*/
}
} catch (const boost::bad_lexical_cast& blc) {
AsciiError("can't parse face: bad data");
return std::make_unique<PolySet>(3);
}
}

//PRINTDB("PS: %ld vertices, %ld indices", ps->vertices.size() % ps->indices.size());
return ps;
}
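For context, a minimal sketch of how the new parser could be exercised stand-alone. This harness is not part of the commit: it assumes an OpenSCAD build tree for PolySet.h and AST.h, the import_off() signature above, and the Location::NONE sentinel used elsewhere in the codebase.

#include <fstream>
#include <iostream>
#include <memory>
#include "PolySet.h"
#include "AST.h"

std::unique_ptr<PolySet> import_off(const std::string& filename, const Location& loc);

int main()
{
  {
    // Write the tetrahedron from the reference comment above.
    std::ofstream out("tetra.off");
    out << "OFF\n4 4 6\n"
           "0 0 0\n1 0 0\n0 1 0\n0 0 1\n"
           "3 0 2 1\n3 0 1 3\n3 0 3 2\n3 1 2 3\n";
  }
  auto ps = import_off("tetra.off", Location::NONE);
  // Expected: 4 vertices, 4 faces.
  std::cout << ps->vertices.size() << " vertices, "
            << ps->indices.size() << " faces\n";
  return 0;
}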
