
Commit

NUTCH-269 : OOME because no upper-bound on inlinks count (stack + jnioche)

git-svn-id: https://svn.apache.org/repos/asf/lucene/nutch/trunk@897180 13f79535-47bb-0310-9956-ffa450edef68
jnioche committed Jan 8, 2010
1 parent 611980d commit b4ade8b
Showing 3 changed files with 43 additions and 6 deletions.
2 changes: 2 additions & 0 deletions CHANGES.txt
@@ -2,6 +2,8 @@ Nutch Change Log

Unreleased Changes

* NUTCH-269 CrawlDbReducer: OOME because no upper-bound on inlinks count (stack + jnioche)

* NUTCH-655 Injecting Crawl metadata (jnioche)

* NUTCH-658 Use counters to report fetching and parsing status (jnioche)
8 changes: 8 additions & 0 deletions conf/nutch-default.xml
@@ -383,6 +383,14 @@
</description>
</property>

<property>
<name>db.update.max.inlinks</name>
<value>10000</value>
<description>Maximum number of inlinks to take into account when updating
a URL score in the crawlDB. Only the best scoring inlinks are kept.
</description>
</property>
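
The new db.update.max.inlinks property caps how many inlinks the CrawlDb update job will hold in memory for a single URL. As a minimal sketch (not part of this commit), the limit could also be overridden programmatically before running the job, assuming the Hadoop JobConf API and NutchConfiguration.create() used elsewhere in Nutch; the class name InlinkCapExample is hypothetical.

// Hypothetical example: lower the inlink cap for a memory-constrained cluster.
import org.apache.hadoop.mapred.JobConf;
import org.apache.nutch.util.NutchConfiguration;

public class InlinkCapExample {
  public static void main(String[] args) {
    JobConf job = new JobConf(NutchConfiguration.create());
    job.setInt("db.update.max.inlinks", 1000);             // override the 10000 default
    int maxLinks = job.getInt("db.update.max.inlinks", 10000);
    System.out.println("CrawlDbReducer will keep at most " + maxLinks + " inlinks per URL");
  }
}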

<property>
<name>db.ignore.internal.links</name>
<value>true</value>
39 changes: 33 additions & 6 deletions src/java/org/apache/nutch/crawl/CrawlDbReducer.java
@@ -19,6 +19,7 @@

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.io.IOException;

// Commons Logging imports
@@ -27,6 +28,7 @@

import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.PriorityQueue;
import org.apache.nutch.metadata.Nutch;
import org.apache.nutch.scoring.ScoringFilterException;
import org.apache.nutch.scoring.ScoringFilters;
@@ -37,7 +39,7 @@ public class CrawlDbReducer implements Reducer<Text, CrawlDatum, Text, CrawlDatu

private int retryMax;
private CrawlDatum result = new CrawlDatum();
private ArrayList<CrawlDatum> linked = new ArrayList<CrawlDatum>();
private InlinkPriorityQueue linked = null;
private ScoringFilters scfilters = null;
private boolean additionsAllowed;
private int maxInterval;
@@ -51,6 +53,8 @@ public void configure(JobConf job) {
maxInterval = job.getInt("db.fetch.interval.max", 0 );
if (oldMaxInterval > 0 && maxInterval == 0) maxInterval = oldMaxInterval * FetchSchedule.SECONDS_PER_DAY;
schedule = FetchScheduleFactory.getFetchSchedule(job);
int maxLinks = job.getInt("db.update.max.inlinks", 10000);
linked = new InlinkPriorityQueue(maxLinks);
}

public void close() {}
@@ -111,7 +115,7 @@ public void reduce(Text key, Iterator<CrawlDatum> values,
} else {
link = datum;
}
linked.add(link);
linked.insert(link);
break;
case CrawlDatum.STATUS_SIGNATURE:
signature = datum.getSignature();
@@ -120,13 +124,21 @@
LOG.warn("Unknown status, key: " + key + ", datum: " + datum);
}
}


// copy the content of the queue into a List
// in reversed order
int numLinks = linked.size();
List<CrawlDatum> linkList = new ArrayList<CrawlDatum>(numLinks);
for (int i = numLinks - 1; i >= 0; i--) {
linkList.add(linked.pop());
}

// if it doesn't already exist, skip it
if (!oldSet && !additionsAllowed) return;

// if there is no fetched datum, perhaps there is a link
if (!fetchSet && linked.size() > 0) {
fetch = linked.get(0);
if (!fetchSet && linkList.size() > 0) {
fetch = linkList.get(0);
fetchSet = true;
}

@@ -260,7 +272,7 @@ public void reduce(Text key, Iterator<CrawlDatum> values,
}

try {
scfilters.updateDbScore((Text)key, oldSet ? old : null, result, linked);
scfilters.updateDbScore((Text)key, oldSet ? old : null, result, linkList);
} catch (Exception e) {
if (LOG.isWarnEnabled()) {
LOG.warn("Couldn't update score, key=" + key + ": " + e);
@@ -270,5 +282,20 @@ public void reduce(Text key, Iterator<CrawlDatum> values,
result.getMetaData().remove(Nutch.WRITABLE_GENERATE_TIME_KEY);
output.collect(key, result);
}

}

class InlinkPriorityQueue extends PriorityQueue<CrawlDatum> {

public InlinkPriorityQueue(int maxSize) {
initialize(maxSize);
}

/** Determines the ordering of objects in this priority queue. **/
protected boolean lessThan(Object arg0, Object arg1) {
CrawlDatum candidate = (CrawlDatum) arg0;
CrawlDatum least = (CrawlDatum) arg1;
return candidate.getScore() > least.getScore();
}

}
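
For illustration only, a hypothetical usage sketch (not part of this commit) of how the bounded queue behaves once it is full, assuming Hadoop's PriorityQueue insert()/pop()/size() semantics and CrawlDatum's (status, fetchInterval) constructor and score accessors; the demo class name is made up and would need to live in org.apache.nutch.crawl because InlinkPriorityQueue is package-private.

// Hypothetical demo, not part of the patch.
package org.apache.nutch.crawl;

public class InlinkQueueDemo {
  public static void main(String[] args) {
    InlinkPriorityQueue queue = new InlinkPriorityQueue(3);     // cap at 3 inlinks
    float[] scores = {0.1f, 0.9f, 0.5f, 0.3f, 0.7f};
    for (float score : scores) {
      CrawlDatum link = new CrawlDatum(CrawlDatum.STATUS_LINKED, 0);
      link.setScore(score);
      queue.insert(link);   // once full, insert() either rejects the datum or replaces top()
    }
    // However many inlinks a popular URL accumulates, at most 3 CrawlDatum
    // objects are retained here, which is the upper bound that avoids the OOME.
    while (queue.size() > 0) {
      System.out.println(queue.pop().getScore());
    }
  }
}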
