1 parent c0bfe19 commit 4641964645273a7154ece8a7cb1dafa36dee7a8c Alexander Rush committed Mar 29, 2012
Showing with 168 additions and 237 deletions.
  1. +51 −36 CubeLM.cpp
  2. +57 −53 CubeLM.h
  3. +41 −135 hypergraph/CubePruning.cpp
  4. +17 −12 hypergraph/CubePruning.h
  5. +1 −1 hypergraph/HypergraphImpl.cpp
  6. +1 −0 interfaces/hypergraph/hypergraph.proto
87 CubeLM.cpp
@@ -18,82 +18,97 @@
#include "common.h"
using namespace std;
-Cache <Hypernode, int > * cache_word_nodes(Ngram lm, const Forest & forest) {
+DEFINE_string(forest_prefix, "", "prefix of the forest files");
+DEFINE_string(forest_range, "", "range of forests to use (e.g. '0 10')");
+DEFINE_int64(cube_size, 100, "size of the beam cube");
+
+static const bool forest_dummy = RegisterFlagValidator(&FLAGS_forest_prefix, &ValidateReq);
+static const bool range_dummy = RegisterFlagValidator(&FLAGS_forest_range, &ValidateRange);
+
+// Build a cache mapping each node to its LM index.
+Cache<Hypernode, int > *cache_word_nodes(Ngram lm, const Forest & forest) {
int max = lm.vocab.numWords();
int unk = lm.vocab.getIndex(Vocab_Unknown);
- Cache <Hypernode, int > * words = new Cache <Hypernode, int >(forest.num_nodes());
- foreach (HNode hnode, forest.nodes()) { //int i=0; i< forest.num_nodes(); i++ ) {
- const ForestNode & node = * ((ForestNode*)hnode);// (ForestNode) forest.get_node(i);
+ Cache<Hypernode, int > * words = new Cache <Hypernode, int >(forest.num_nodes());
+ foreach (HNode hnode, forest.nodes()) {
+ const ForestNode & node = * ((ForestNode*)hnode);
if (node.is_word()) {
string str = node.word();
int ind = lm.vocab.getIndex(str.c_str());
- //cout << node.id() << endl;
+ // Unknown cases.
if (ind == -1 || ind > max) {
words->set_value(node, unk);
- //cout << "Word " << unk;
} else {
words->set_value(node, ind);
- //cout << "Word " << ind;
}
}
}
return words;
}
-
-DEFINE_string(forest_prefix, "", "prefix of the forest files");
-DEFINE_string(forest_range, "", "range of forests to use (e.g. '0 10')");
-DEFINE_int64(cube_size, 100, "size of the beam cube");
-
-static const bool forest_dummy = RegisterFlagValidator(&FLAGS_forest_prefix, &ValidateReq);
-static const bool range_dummy = RegisterFlagValidator(&FLAGS_forest_range, &ValidateRange);
-
-
int main(int argc, char ** argv) {
- //cout << argc << endl;
google::ParseCommandLineFlags(&argc, &argv, true);
wvector * weight = cmd_weights();
Ngram * lm = cmd_lm();
-
+ int n_best = 10;
- //cout << "START!!!!" << endl;
GOOGLE_PROTOBUF_VERIFY_VERSION;
istringstream range(FLAGS_forest_range);
int start_range, end_range;
range >> start_range >> end_range;
for (int i = start_range; i <= end_range; i++) {
-
-
- //Hypergraph hgraph;
stringstream fname;
fname << FLAGS_forest_prefix << i;
Forest f = Forest::from_file(fname.str().c_str());
-
- // Optional: Delete all global objects allocated by libprotobuf.
- //google::protobuf::ShutdownProtobufLibrary();
-
-
- //f.append_end_nodes();
+
HypergraphAlgorithms ha(f);
Cache<Hyperedge, double> * w = ha.cache_edge_weights( *weight);
Cache<Hypernode, int> * words = cache_word_nodes(*lm, f);
clock_t begin=clock();
int cube = FLAGS_cube_size;
CubePruning p(f, *w, LMNonLocal(f, *lm, lm_weight(), *words), cube, 3);
- double v =p.parse();
+ double v =p.parse();
clock_t end=clock();
- cout << "*TRANS* " << i << " ";
- vector <int> sent;
- p.get_derivation(sent);
- foreach (int s, sent) {
- cout <<((ForestNode *) &f.get_node(s))->word() << " ";
+ //cout << "*TRANS* " << i << " ";
+ for (int n = 0; n < n_best; ++n) {
+ vector<int> sent;
+ p.get_derivation(sent, n);
+ cout << i << " ||| ";
+ double lm_score = 0.0;
+ for (int j = 0; j < sent.size(); ++j) {
+ int s = sent[j];
+ string word = ((ForestNode *) &f.get_node(s))->word();
+ if (!(word == "<s>" || word == "</s>")) {
+ cout << word << " ";
+ }
+ if (j > 1) {
+ VocabIndex context [] = { words->store[sent[j - 1]],
+ words->store[sent[j - 2]],
+ Vocab_None };
+ lm_score += lm->wordProb(words->store[sent[j]], context);
+ }
+ }
+ cout << " ||| " ;
+ vector<int> edges;
+ p.get_edges(edges, n);
+ svector<int, double> vector;
+ foreach (int e, edges) {
+ vector += f.get_edge(e).fvector();
+ }
+ for(int i = 0; i < vector.size(); ++i) {
+ cout << vector[i] << " ";
+ }
+ cout << lm_score << " ";
+ double score = p.get_score(n);
+
+ cout << " ||| " << -score;
+ cout << endl;
}
- cout << endl;
- cout << "*END*" << i << " "<< v << " " << cube<<" " << (double)Clock::diffclock(end,begin) << endl;
+ //cout << "*END*" << i << " "<< v << " " << cube<<" " << (double)Clock::diffclock(end,begin) << endl;
}
return 0;
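
For reference, the loop above now writes one Moses-style n-best line per candidate: sentence id, the words of the derivation, the summed feature values plus the recomputed LM score, and the negated model score, joined by " ||| ". Below is a minimal sketch of a consumer for that format; only the delimiter and field order are taken from the printing code, and every name in the sketch is illustrative.

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Split one n-best line on the " ||| " delimiter used in main().
std::vector<std::string> split_fields(const std::string &line) {
  std::vector<std::string> fields;
  const std::string delim = " ||| ";
  size_t start = 0, pos;
  while ((pos = line.find(delim, start)) != std::string::npos) {
    fields.push_back(line.substr(start, pos - start));
    start = pos + delim.size();
  }
  fields.push_back(line.substr(start));
  return fields;
}

int main() {
  std::string line;
  while (std::getline(std::cin, line)) {
    std::vector<std::string> fields = split_fields(line);
    if (fields.size() < 4) continue;  // not an n-best line
    std::istringstream first(fields[0]);
    int sent_id = 0;
    first >> sent_id;
    // fields[1] holds the words, fields[2] the feature values and
    // LM score, fields[3] the negated model score.
    std::cout << sent_id << ": " << fields[1] << std::endl;
  }
  return 0;
}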
110 CubeLM.h
@@ -8,49 +8,45 @@
#include "common.h"
class LMNonLocal: public NonLocal {
public:
- //~LMNonLocal(){}
- LMNonLocal(const HGraph & forest, Ngram & lm, double lm_weight, const Cache <Hypernode, int> & word_cache)
+ LMNonLocal(const HGraph & forest,
+ Ngram & lm,
+ double lm_weight,
+ const Cache <Hypernode, int> & word_cache)
: _forest(forest), _lm(lm), _lm_weight(lm_weight), _word_cache(word_cache) {}
- void compute(const Hyperedge & edge,
- const vector <vector <int> > & subder,
- double & score,
- vector <int> & full_derivation,
- vector <int> & sig
- ) const {
+ void compute(const Hyperedge &edge,
+ const vector <vector <int> > &subder,
+ double &score,
+ vector <int> &full_derivation,
+ vector <int> &signature) const {
full_derivation.clear();
- sig.clear();
+ signature.clear();
score =0.0;
- //cout << "COMBINE " << subder.size() <<endl;
- for (unsigned int i =0; i < subder.size(); i++) {
- unsigned int size = full_derivation.size();
+
+ for (uint i = 0; i < subder.size(); i++) {
+ uint size = full_derivation.size();
int orig = subder[i][0];
int w = _word_cache.store[orig];
- if (size >= 2) {
-
- VocabIndex context [] = {_word_cache.store[full_derivation[size-1]],
- _word_cache.store[full_derivation[size-2]],
+ if (size >= 2) {
+ VocabIndex context [] = {_word_cache.store[full_derivation[size - 1]],
+ _word_cache.store[full_derivation[size - 2]],
Vocab_None};
score += _lm.wordProb(w, context);
- //cout << _lm.wordProb(w, context) << " " << _lm.vocab.getWord(w) << " " << _lm.vocab.getWord(context[1]) << " " << _lm.vocab.getWord(context[0]) << endl;
- // subtract out uni
- if (w!=1 && w!=2) {
+
+ // Subtract out unigram probability.
+ if (w != 1 && w != 2) {
const VocabIndex context2 [] = {Vocab_None};
score -= _lm.wordProb(w, context2);
- //cout << "bonus" << endl;
}
- //cout << "\t" << score <<endl;
- //cout << "\t" << "TRIGRAM " << w << " " << full_derivation[size-1] << " " << full_derivation[size-2] <<endl;
+
} else if (size ==1 && w != 1 ) {
- if (w !=1 && w!= 2) {
+ if (w != 1 && w != 2) {
VocabIndex context [] = {_word_cache.store[full_derivation[size-1]], Vocab_None};
score += _lm.wordProb(w, context);
-
-
- // subtract out uni
-
+
+ // Subtract out unigram probability.
const VocabIndex context2 [] = {Vocab_None};
score -= _lm.wordProb(w, context2);
//cout << "bonus" << endl;
@@ -60,59 +56,67 @@ class LMNonLocal: public NonLocal {
if (size >=1 && subder[i].size() > 1 ) {
const VocabIndex context [] = {w, _word_cache.store[full_derivation[size-1]], Vocab_None};
score += _lm.wordProb(_word_cache.store[subder[i][1]], context);
- //cout << "\t" << score <<endl;
- //cout << "\t" << "Wait TRIGRAM " << subder[i][1] << " " << w << " " << full_derivation[size-1] << " " << full_derivation[size-2] <<endl;
- //cout << "\t" << "SCORE " << _lm.wordProb(subder[i][1], context) << endl;
if ( _word_cache.store[subder[i][1]]!= 1 && _word_cache.store[subder[i][1]]!=2) {
const VocabIndex context2 [] = {w, Vocab_None};
score -= _lm.wordProb(_word_cache.store[subder[i][1]], context2);
- //cout << "bonus" << endl;
}
}
- //cout << "\t" << size <<endl;
foreach (int final, subder[i]) {
- //cout << _lm.vocab.getWord(subder[i][j]) << " " ;
full_derivation.push_back(final);
}
}
- //cout << endl;
+
score *= _lm_weight;
- //cout << score <<endl;
- //cout << full_derivation.size() << endl;;
int size = full_derivation.size();
- sig.push_back(_word_cache.store[full_derivation[0]]);
- sig.push_back(_word_cache.store[full_derivation[size-1]]);
+ signature.push_back(_word_cache.store[full_derivation[0]]);
+ signature.push_back(_word_cache.store[full_derivation[size-1]]);
assert(size > 0);
if (size!=1) {
- sig.push_back(_word_cache.store[full_derivation[1]]);
- sig.push_back(_word_cache.store[full_derivation[size-2]]);
+ signature.push_back(_word_cache.store[full_derivation[1]]);
+ signature.push_back(_word_cache.store[full_derivation[size-2]]);
}
}
- Hyp initialize(const Hypernode & node) const {
- assert (node.is_terminal());
- int original = node.id();
- int w = _word_cache.get_value(node);
+ // Initialize the hypothesis for leaf nodes.
+ Hyp initialize(const Hypernode &node) const {
+ assert(node.is_terminal());
+ int word_index = _word_cache.get_value(node);
double score = 0.0;
VocabIndex context [] = {Vocab_None};
- if (w!=1 && w!=2) {
- score += _lm.wordProb(w, context);
+
+ // Not a special word (todo: fix).
+ if (word_index != 1 && word_index != 2) {
+ // Unigram probability.
+ score += _lm.wordProb(word_index, context);
score *= _lm_weight;
}
- //cout << "WORD " << _word_cache.get_value(node) << " "<< _lm.vocab.getWord(w)<< endl;
- vector <int> sig;
- sig.push_back(w);
- sig.push_back(w);
- vector <int> der;
- der.push_back(original);
- return Hyp(score, sig, der);
+
+ // Signature (left and right words).
+ vector <int> signature;
+ signature.push_back(word_index);
+ signature.push_back(word_index);
+
+ // Build up the derivation of hypernodes.
+ vector <int> derivation;
+ derivation.push_back(node.id());
+
+ vector <int> edges;
+ return Hyp(score, signature, derivation, edges);
}
+
private:
+ // The underlying hypergraph.
const HGraph & _forest;
+
+ // The language model.
Ngram & _lm;
+
+ // Weight to give to the language model.
const double _lm_weight;
+
+ // The language model index for each hypernode.
const Cache <Hypernode, int> & _word_cache;
};
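
The bookkeeping in compute() above is the usual cube-pruning LM correction: every hypothesis is first scored with whatever context it has, and when sub-derivations are concatenated the two words at the seam are rescored with full trigram context while the unigram and bigram estimates already counted are subtracted back out. A self-contained sketch of that correction follows, with a dummy scorer standing in for SRILM's Ngram::wordProb; the guards for the sentence-boundary ids 1 and 2 and the short-sequence cases are omitted, and all names are assumptions of the sketch.

#include <iostream>
#include <vector>

// Stand-in for an n-gram log-probability; the real code calls
// SRILM's Ngram::wordProb. The dummy value just lets the sketch run.
double logprob(int word, const std::vector<int> &context) {
  return -1.0 - 0.1 * context.size() - 0.001 * word;
}

// Correction for appending `right` to `left`: add the trigrams that
// cross the seam, remove the unigram/bigram scores the first two
// words of `right` were given when it was built. Assumes both
// sequences have at least two words.
double seam_correction(const std::vector<int> &left,
                       const std::vector<int> &right) {
  size_t n = left.size();
  double delta = 0.0;
  std::vector<int> tri1;
  tri1.push_back(left[n - 1]);   // context[0]: previous word
  tri1.push_back(left[n - 2]);   // context[1]: word before that
  delta += logprob(right[0], tri1);                // new trigram
  delta -= logprob(right[0], std::vector<int>());  // was a unigram
  std::vector<int> tri2;
  tri2.push_back(right[0]);
  tri2.push_back(left[n - 1]);
  delta += logprob(right[1], tri2);                // new trigram
  delta -= logprob(right[1], std::vector<int>(1, right[0]));  // was a bigram
  return delta;
}

int main() {
  std::vector<int> left(2, 3), right(2, 5);
  std::cout << seam_correction(left, right) << std::endl;
  return 0;
}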
176 hypergraph/CubePruning.cpp
@@ -5,22 +5,27 @@
using namespace std;
-
-
-//typedef priority_queue< const Candidate * > Candidates;
-
double CubePruning::parse() {
- run(_forest.root(), _hypothesis_cache.store[_forest.root().id()]);
+ run(_forest.root(), _hypothesis_cache.store[_forest.root().id()]);
return _hypothesis_cache.store[_forest.root().id()][0].score;
- //cout << _hypothesis_cache.store[_forest.root().id()][0].score << endl;
- //cout << _hypothesis_cache.store[_forest.root().id()][1].score << endl;
- //cout << _hypothesis_cache.store[_forest.root().id()][2].score << endl;
}
-void CubePruning::get_derivation(vector <int> & der) {
+void CubePruning::get_derivation(vector<int> &der) {
der = _hypothesis_cache.store[_forest.root().id()][0].full_derivation;
}
+void CubePruning::get_derivation(vector<int> &der, int n) {
+ der = _hypothesis_cache.store[_forest.root().id()][n].full_derivation;
+}
+
+void CubePruning::get_edges(vector<int> &edges, int n) {
+ edges = _hypothesis_cache.store[_forest.root().id()][n].edges;
+}
+
+double CubePruning::get_score(int n) {
+ return _hypothesis_cache.store[_forest.root().id()][n].score;
+}
+
void CubePruning::run(const Hypernode & cur_node, vector <Hyp> & kbest_hyps) {
//compute the k-'best' list for cur_node
foreach (HEdge hedge, cur_node.edges()) {
@@ -35,32 +40,15 @@ void CubePruning::run(const Hypernode & cur_node, vector <Hyp> & kbest_hyps) {
//create cube
if (!cur_node.is_terminal()) {
Candidates cands;
- //cout << "Starting cube" << endl;
init_cube(cur_node, cands);
-
- //heapq.heapify(cands);
-
- // gen kbest
- //vector<Hyp> kbest_hyp;
- //cout << "DOING NODE: " << cur_node.id() <<endl;
-
- kbest(cands, kbest_hyps);
- //cout << kbest_hyps.size() << endl;
- //cout << "SIZE " << cur_node.id() << " " << kbest_hyps.size() << endl;
+ if (cur_node.id() == _forest.root().id()) {
+ kbest(cands, kbest_hyps, false);
+ } else {
+ kbest(cands, kbest_hyps, true);
+ }
} else {
- //vector <int> * p = new vector <int> ();
- //vector<int> n;
- //n.push_back(_non_local.initialize(cur_node));
- //vector <int> sig;
- //sig.push_back(cur_node.id());
kbest_hyps.push_back(_non_local.initialize(cur_node));
- //cout << "Word " << endl;
- }
-
- //print cur_node
- //print map(str,self.hypothesis_cache[cur_node])
-
- //return kbest_hyps;
+ }
}
void CubePruning::init_cube(const Hypernode & cur_node, Candidates & cands) {
@@ -87,7 +75,7 @@ void CubePruning::init_cube(const Hypernode & cur_node, Candidates & cands) {
}
-void CubePruning::kbest(Candidates & cands, vector <Hyp> & newhypvec) {
+void CubePruning::kbest(Candidates & cands, vector <Hyp> & newhypvec, bool recombine) {
// Algorithm 2, kbest
// list of best hypvectors (buf)
@@ -105,37 +93,21 @@ void CubePruning::kbest(Candidates & cands, vector <Hyp> & newhypvec) {
while (cur_kbest < _k &&
! (cands.empty() ||
hypvec.size() >= buf_limit)) {
- //cout << buf_limit << " " << cands.size() << endl;
Candidate * cand = cands.top();
cands.pop();
const Hyp & chyp = cand->hyp;
const Hyperedge & cedge = cand->edge;
const vector <int> & cvecj = cand->vec;
- //cout << "Init vec ";
- //for (int p=0; p < cvecj.size();p++) {
- //cout << cvecj[p] << " ";
- //assert(!cvecj[p]);
- //}
- //cout << endl;
-
-
//TODO: duplicate management
- //cout << "SIG: ";
- //for (int p=0; p < chyp.sig.size();p++)
- //cout << chyp.sig[p] << " ";
- //cout << endl;
- if (sigs.find(chyp.sig) == sigs.end()) {
+ if (!recombine || sigs.find(chyp.sig) == sigs.end()) {
sigs.insert(chyp.sig);
cur_kbest += 1;
-
- //cout << cur_kbest << endl;
} else {
}
- //cout << chyp.sig << " " << chyp.score << endl;
// add hypothesis to buffer
hypvec.push_back(chyp);
@@ -159,54 +131,29 @@ void CubePruning::kbest(Candidates & cands, vector <Hyp> & newhypvec) {
*/
// RECOMBINATION (shrink buf to actual k-best list)
- // sort and combine hypevec
-
+ // Sort and combine hypvec.
assert(cur_kbest);
assert(hypvec.size());
sort(hypvec.begin(), hypvec.end());
map <Sig, int> keylist;
- //vector <Hyp> newhypvec;
-
for (uint i=0; i < hypvec.size(); i++) {
Hyp item = hypvec[i];
assert(i == 0 || item.score >= hypvec[i-1].score);
- //cout << item.score << " " << endl;
-
- //for (int p=0; p < item.sig.size();p++) {
- //cout << item.sig[p] << " ";
- //}
- //cout << endl;
map<Sig, int>::iterator f = keylist.find(item.sig);
- if (f == keylist.end()) {
+ if (!recombine || f == keylist.end()) {
//cout << "miss" << endl;
keylist[item.sig] = newhypvec.size();
-
- //for (int p=0; p < item.full_derivation.size();p++) {
- //cout << item.full_derivation[p] << " ";
- //}
- //cout << item.score;
- //cout << endl;
-
newhypvec.push_back(item);
if (newhypvec.size() >= _k) {
break;
}
}
- else {
- //int pos = keylist[item.sig];
- //semiring plus
- //newhypvec[pos].add(item);
- }
- }
+ }
assert(newhypvec.size());
-
-
-
- //return newhypvec;
}
void CubePruning::next(const Hyperedge & cedge, const vector <int > & cvecj, Candidates & cands){
@@ -216,40 +163,27 @@ void CubePruning::next(const Hyperedge & cedge, const vector <int > & cvecj, Can
@param cands - current candidate list
*/
// for each dimension of the cube
- //cout << "Cur vec ";
+
assert(cvecj.size() == cedge.num_nodes());
for (uint i=0; i < cedge.num_nodes(); i++) {
// vecj' = vecj + b^i (just change the i^th dimension
vector <int> newvecj(cvecj);
newvecj[i] += 1;
-
- //for (int p=0; p < cvecj.size();p++) {
- //cout << newvecj[p] << " ";
- //}
- //cout << endl;
-
- //newvecj = cvecj[:i] + (cvecj[i]+1,) + cvecj[i+1:];
-
+
set <vector <int> > & vecs = _oldvec.store[cedge.id()];
if (vecs.find(newvecj)==vecs.end()) {
Hyp newhyp;
if (gethyp(cedge, newvecj, newhyp)){
- // add j'th dimension to the cube
+ // Add j'th dimension to the cube
_oldvec.store[cedge.id()].insert(newvecj);
- //cout << "INSERTING NEW" << endl;
int orig = cands.size();
cands.push(new Candidate(newhyp, cedge, newvecj));
assert(cands.size() != (uint)orig);
- } else {
- //cout << "no get" << endl;
- }
}
- //else {
- //cout << "seen" << endl;
- //}
+ }
}
}
@@ -260,33 +194,24 @@ bool CubePruning::gethyp(const Hyperedge & cedge, const vector <int> & vecj, Hyp
vecj-best parses along cedge. Also, apply non-local feature functions (LM)
*/
- //cout << "ENTER " << endl;
- double score = _weights.get_value(cedge);
-
- vector <vector <int> > subders;
+ double score = _weights.get_value(cedge);
+ vector<vector <int> > subders;
+ vector<int> edges;
// grab the jth best hypothesis at each node of the hyperedge
- //cout << cedge.num_nodes() << endl;
for (uint i=0; i < cedge.num_nodes(); i++) {
- const Hypernode & sub = cedge.tail_node(i);
-
+ const Hypernode & sub = cedge.tail_node(i);
if (vecj[i] >= (int)_hypothesis_cache.get_value(sub).size()) {
- //cout << "FAIL for size " << _hypothesis_cache.get_value(sub).size();
return false;
}
-
-
-
Hyp item = _hypothesis_cache.get_value(sub)[vecj[i]];
-
- //cout << "ITEM FULL DER: ";
- assert (item.full_derivation.size() != 0);
- //for (int p=0; p < item.full_derivation.size();p++)
- //cout << item.full_derivation[p] << " ";
- //cout << endl;
-
+ assert (item.full_derivation.size() != 0);
subders.push_back(item.full_derivation);
- // generic times (eventually)
+ for (uint j = 0; j < item.edges.size(); ++j) {
+ edges.push_back(item.edges[j]);
+ }
+
+ // Generic times (eventually)
score = score + item.score;
}
@@ -295,28 +220,9 @@ bool CubePruning::gethyp(const Hyperedge & cedge, const vector <int> & vecj, Hyp
Sig sig;
double non_local_score;
_non_local.compute(cedge, subders, non_local_score, full_derivation, sig);
- //cout << " NON LOCAL SCORE " << non_local_score << endl;;
score = score + non_local_score;
-
- //cout << "LOWER SIG: ";
- //for (int p=0; p < sig.size();p++)
- //cout << sig[p] << " ";
- //cout << endl;
-
- //cout << "FULL DER: ";
- //for (int p=0; p < full_derivation.size();p++)
- //cout << full_derivation[p] << " ";
- //cout << endl;
-
- //item = Hyp(score, full_derivation, cedge, vecj, sig);
-
- item = Hyp(score, sig, full_derivation);
- /*for (int p=0; p < item.full_derivation.size();p++) {
- cout << item.full_derivation[p] << " ";
- }
- cout << item.score;
- cout << endl;
- */
+ edges.push_back(cedge.id());
+ item = Hyp(score, sig, full_derivation, edges);
assert(item.full_derivation.size()!=0);
return true;
}
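
The recombine flag threads the one behavioral change through kbest(): interior nodes still merge hypotheses that share an LM signature, but at the root recombination is switched off so that distinct derivations with the same boundary words survive into the n-best list. A minimal sketch of that final filtering pass, with an illustrative Item struct standing in for Hyp:

#include <algorithm>
#include <map>
#include <vector>

typedef std::vector<int> Sig;

// Illustrative stand-in for Hyp: a score plus an LM signature.
struct Item {
  double score;
  Sig sig;
  bool operator<(const Item &other) const { return score < other.score; }
};

// Sketch of the tail of CubePruning::kbest: sort the buffer, then
// keep the k best items, skipping signature duplicates only when
// recombine is set.
std::vector<Item> select_kbest(std::vector<Item> buf, size_t k,
                               bool recombine) {
  std::sort(buf.begin(), buf.end());
  std::map<Sig, int> seen;
  std::vector<Item> best;
  for (size_t i = 0; i < buf.size() && best.size() < k; ++i) {
    if (!recombine || seen.find(buf[i].sig) == seen.end()) {
      seen[buf[i].sig] = (int)best.size();
      best.push_back(buf[i]);
    }
  }
  return best;
}

Keeping recombination on at interior nodes preserves beam diversity per signature; turning it off at the root is what lets get_derivation(der, n) return n distinct derivations rather than n distinct signatures.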
29 hypergraph/CubePruning.h
@@ -16,11 +16,12 @@ struct Hyp {
public:
Hyp(){}
- Hyp(double score_in, Sig sig_in, vector<int> full_der):
- score(score_in), sig(sig_in), full_derivation(full_der){}
+ Hyp(double score_in, Sig sig_in, vector<int> full_der, const vector<int> &edges_):
+ score(score_in), sig(sig_in), full_derivation(full_der), edges(edges_){}
double score;
Sig sig;
vector <int> full_derivation;
+ vector<int> edges;
bool operator<(const Hyp & other) const {
return score < other.score;
}
@@ -56,19 +57,19 @@ class BlankNonLocal: public NonLocal {
}
virtual Hyp initialize(const Hypernode & node) const {
- return Hyp(0.0, vector<int>(), vector<int>());
+ return Hyp(0.0, vector<int>(), vector<int>(), vector<int>());
}
};
struct Candidate {
- Candidate( Hyp h, const Hyperedge & e, const vector <int> & v)
+ Candidate(Hyp h, const Hyperedge &e, const vector<int> &v)
: hyp(h), edge(e), vec(v){}
Hyp hyp;
- const Hyperedge & edge;
+ const Hyperedge &edge;
vector <int> vec;
+
bool operator<(const Candidate & other ) const {
-
return hyp < other.hyp;
}
};
@@ -84,17 +85,21 @@ typedef priority_queue <Candidate *, vector<Candidate*>, candidate_compare> Cand
class CubePruning {
public:
- CubePruning(const HGraph & forest, const Cache <Hyperedge, double> & weights, const NonLocal & non_local, int k, int ratio):
+ CubePruning(const HGraph & forest, const Cache <Hyperedge, double> & weights, const NonLocal & non_local,
+ int k, int ratio):
_forest(forest), _weights(weights), _non_local(non_local), _k(k), _ratio(ratio),
_hypothesis_cache(forest.num_nodes()), _oldvec(forest.num_edges())
{}
- void get_derivation(vector <int> & der);
+ void get_derivation(vector<int> &der);
+ void get_derivation(vector<int> &der, int n);
+ void get_edges(vector<int> &edges, int n);
+ double get_score(int n);
double parse();
void run(const Hypernode & cur_node, vector <Hyp> & kbest_hyps);
- void init_cube(const Hypernode & cur_node, Candidates & cands);
- void kbest(Candidates & cands, vector <Hyp> &);
+ void init_cube(const Hypernode & cur_node, Candidates &cands);
+ void kbest(Candidates & cands, vector <Hyp> &, bool recombine);
void next(const Hyperedge & cedge, const vector <int > & cvecj, Candidates & cands);
bool gethyp(const Hyperedge & cedge, const vector <int> & vecj, Hyp & item);
private:
@@ -106,8 +111,8 @@ class CubePruning {
const uint _k;
const uint _ratio;
- Cache<Hypernode, vector <Hyp> > _hypothesis_cache;
- Cache<Hyperedge, set < vector <int> > > _oldvec;
+ Cache<Hypernode, vector<Hyp> > _hypothesis_cache;
+ Cache<Hyperedge, set<vector <int> > > _oldvec;
//const Cache<Hypernode, Float> & _hypothesis_cache;
//const PriorityQueue _candidates;
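
The new edges field is what makes the per-candidate feature output in CubeLM.cpp possible: each Hyp now records the hyperedge ids of its derivation, so a caller can rebuild a candidate's feature vector after parsing. A minimal sketch of that use; feature_of stands in for forest.get_edge(e).fvector(), and std::map<int, double> plays the role of svector<int, double>, both assumptions of the sketch.

#include <map>
#include <vector>

// Dummy per-edge feature lookup so the sketch is self-contained.
std::map<int, double> feature_of(int edge_id) {
  std::map<int, double> fv;
  fv[edge_id % 3] = 1.0;
  return fv;
}

// Accumulate the sparse feature vector of one n-best candidate from
// the hyperedge ids returned by CubePruning::get_edges.
std::map<int, double> candidate_features(const std::vector<int> &edges) {
  std::map<int, double> total;
  for (size_t i = 0; i < edges.size(); ++i) {
    std::map<int, double> fv = feature_of(edges[i]);
    for (std::map<int, double>::iterator it = fv.begin();
         it != fv.end(); ++it) {
      total[it->first] += it->second;
    }
  }
  return total;
}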
2 hypergraph/HypergraphImpl.cpp
@@ -139,7 +139,7 @@ void HypergraphImpl::build_from_proto(Hypergraph *hgraph) {
Hypernode *forest_node = make_node(node, features);
//assert (forest_node->
- //assert (_nodes.size() == (uint)node.id());
+ assert (node.id() < hgraph->node_size());
_nodes[node.id()] = forest_node;
//assert(_nodes[forest_node->id()]->id() == forest_node->id());
}
1 interfaces/hypergraph/hypergraph.proto
@@ -1,3 +1,4 @@
+
option optimize_for = SPEED;
message Hypergraph {
