Skip to content

Commit

Permalink
Added support for facet counts in the map reduce
Browse files Browse the repository at this point in the history
  • Loading branch information
vzhabiuk committed Jun 8, 2012
1 parent a99979b commit adf9d0c
Show file tree
Hide file tree
Showing 3 changed files with 43 additions and 7 deletions.
@@ -1,11 +1,35 @@
package com.browseengine.bobo.mapred;

import java.util.List;

import com.browseengine.bobo.api.BoboIndexReader;
import com.browseengine.bobo.facets.FacetCountCollector;

/**
 * Part of the bobo request that maintains the intermediate state of the map
 * result while the index is traversed in map/reduce fashion. Implementations
 * receive per-segment callbacks and expose the accumulated result via
 * {@link #getResult()}.
 */
public interface BoboMapFunctionWrapper {
/**
 * Maps the entire segment in one call, without facet count collectors.
 * NOTE(review): overlaps with the two-argument overload below — presumably
 * the legacy entry point; confirm which one callers are expected to use.
 * @param reader the segment reader to map
 */
public void mapFullIndexReader(BoboIndexReader reader);
/**
 * When there is no filter, map reduce will try to map the entire segment
 * @param reader the segment reader to map
 * @param facetCountCollectors the facet count collectors active for this segment
 */
public void mapFullIndexReader(BoboIndexReader reader, FacetCountCollector[] facetCountCollectors);
/**
 * The basic callback method for a single doc
 * @param docId segment-local document id
 * @param reader the segment reader the docId belongs to
 */
public void mapSingleDocument(int docId, BoboIndexReader reader);
/**
 * Segment-completion callback without facet count collectors.
 * NOTE(review): overlaps with the two-argument overload below — confirm
 * whether this variant is still required.
 * @param reader the segment reader that was processed
 */
public void finalizeSegment(BoboIndexReader reader);
/**
 * The callback method, after the segment was processed
 * @param reader the segment reader that was processed
 * @param facetCountCollectors the facet count collectors that ran over this segment
 */
public void finalizeSegment(BoboIndexReader reader, FacetCountCollector[] facetCountCollectors);
/**
 * The callback method, after the partition was processed
 *
 */
public void finalizePartition();
/**
 * Returns the accumulated map/reduce result for this request.
 * @return the intermediate/final map reduce result
 */
public MapReduceResult getResult();
}
Expand Up @@ -4,6 +4,10 @@
import java.util.ArrayList;
import java.util.List;

/**
* Keeps the map reduce results
*
*/
public class MapReduceResult implements Serializable {
protected List mapResults = new ArrayList(200);
protected Serializable reduceResult;
Expand Down
Expand Up @@ -106,7 +106,15 @@ public void setNextReader(BoboIndexReader reader,int docBase) throws IOException
}
_countCollectors = collectorList.toArray(new FacetCountCollector[collectorList.size()]);
}

/**
 * Gathers every facet count collector known to this validator: the
 * validator-level collectors plus, for each hit collector, both its
 * collect-all list and its count collector list.
 * @return a flattened array containing all facet count collectors
 */
public FacetCountCollector[] getCountCollectors() {
  List<FacetCountCollector> all = new ArrayList<FacetCountCollector>(Arrays.asList(_countCollectors));
  for (FacetHitCollector hitCollector : _collectors) {
    all.addAll(hitCollector._collectAllCollectorList);
    all.addAll(hitCollector._countCollectorList);
  }
  return all.toArray(new FacetCountCollector[all.size()]);
}
}

private final static class DefaultFacetValidator extends FacetValidator{
Expand Down Expand Up @@ -286,9 +294,6 @@ public void search(Weight weight, Filter filter, Collector collector, int start,

collector.setScorer(scorer);
target = scorer.nextDoc();
if (target!=DocIdSetIterator.NO_MORE_DOCS && mapReduceWrapper != null) {
mapReduceWrapper.mapFullIndexReader(_subReaders[i]);
}
while(target!=DocIdSetIterator.NO_MORE_DOCS)
{
if(validator.validate(target))
Expand All @@ -303,6 +308,9 @@ public void search(Weight weight, Filter filter, Collector collector, int start,
}
}
}
if (mapReduceWrapper != null) {
mapReduceWrapper.mapFullIndexReader(_subReaders[i], validator.getCountCollectors());
}
}
return;
}
Expand Down Expand Up @@ -371,7 +379,7 @@ public void search(Weight weight, Filter filter, Collector collector, int start,
target = filterDocIdIterator.advance(doc);
}
}
mapReduceWrapper.finalizeSegment(_subReaders[i]);
mapReduceWrapper.finalizeSegment(_subReaders[i], validator.getCountCollectors());
}
}
}
Expand Down

0 comments on commit adf9d0c

Please sign in to comment.