Skip to content

Commit

Permalink
fix for deadlock as seen in http://forum.yacy-websuche.de/viewtopic.php?p=17521#p17521
Browse files Browse the repository at this point in the history

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@6343 6c8d7289-2bf4-0310-a012-ef5d649a1542
  • Loading branch information
orbiter committed Sep 24, 2009
1 parent 031e6ee commit 1171a72
Show file tree
Hide file tree
Showing 2 changed files with 52 additions and 44 deletions.
91 changes: 48 additions & 43 deletions source/de/anomic/kelondro/text/ReferenceOrder.java
Expand Up @@ -43,9 +43,9 @@
import de.anomic.yacy.yacyURL;

public class ReferenceOrder {
protected int maxdomcount;
protected WordReferenceVars min, max;
protected WordReferenceVars min, max;
protected final ScoreCluster<String> doms; // collected for "authority" heuristic
private final RankingProfile ranking;
private String language;
Expand All @@ -60,73 +60,78 @@ public ReferenceOrder(final RankingProfile profile, String language) {
}

public class Normalizer extends Thread {
private ReferenceContainer<WordReference> container;
private BlockingQueue<WordReferenceVars> decodedEntries;
private ReferenceContainer<WordReference> container;
private BlockingQueue<WordReferenceVars> decodedEntries;
public Normalizer(final ReferenceContainer<WordReference> container) {
// normalize ranking: find minimum and maximum of separate ranking criteria
// normalize ranking: find minimum and maximum of separate ranking criteria
assert (container != null);
this.container = container;
this.decodedEntries = new LinkedBlockingQueue<WordReferenceVars>();
}
}

public void run() {
BlockingQueue<WordReferenceVars> vars = WordReferenceVars.transform(container);
BlockingQueue<WordReferenceVars> vars = WordReferenceVars.transform(container);

WordReferenceVars entryMin = null;
WordReferenceVars entryMax = null;
WordReferenceVars entryMax = null;
HashMap<String, Integer> doms0 = new HashMap<String, Integer>();
Integer int1 = 1;

WordReferenceVars iEntry;
String dom;
Integer count;
try {
while ((iEntry = vars.take()) != WordReferenceVars.poison) {
decodedEntries.put(iEntry);
// find min/max
if (entryMin == null) entryMin = iEntry.clone(); else entryMin.min(iEntry);
if (entryMax == null) entryMax = iEntry.clone(); else entryMax.max(iEntry);
// update domcount
dom = iEntry.metadataHash().substring(6);
count = doms0.get(dom);
if (count == null) {
doms0.put(dom, int1);
} else {
doms0.put(dom, Integer.valueOf(count.intValue() + 1));
}
}
} catch (InterruptedException e) {}

if (min == null) min = entryMin.clone(); else min.min(entryMin);
if (max == null) max = entryMax.clone(); else max.max(entryMax);
Map.Entry<String, Integer> entry;
final Iterator<Map.Entry<String, Integer>> di = doms0.entrySet().iterator();
while (di.hasNext()) {
entry = di.next();
doms.addScore(entry.getKey(), (entry.getValue()).intValue());
while ((iEntry = vars.take()) != WordReferenceVars.poison) {
decodedEntries.put(iEntry);
// find min/max
if (entryMin == null) entryMin = iEntry.clone(); else entryMin.min(iEntry);
if (entryMax == null) entryMax = iEntry.clone(); else entryMax.max(iEntry);
// update domcount
dom = iEntry.metadataHash().substring(6);
count = doms0.get(dom);
if (count == null) {
doms0.put(dom, int1);
} else {
doms0.put(dom, Integer.valueOf(count.intValue() + 1));
}
}

if (min == null) min = entryMin.clone(); else min.min(entryMin);
if (max == null) max = entryMax.clone(); else max.max(entryMax);
Map.Entry<String, Integer> entry;
final Iterator<Map.Entry<String, Integer>> di = doms0.entrySet().iterator();
while (di.hasNext()) {
entry = di.next();
doms.addScore(entry.getKey(), (entry.getValue()).intValue());
}

if (doms.size() > 0) maxdomcount = doms.getMaxScore();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
decodedEntries.put(WordReferenceVars.poison);
} catch (InterruptedException e) {}
}

if (doms.size() > 0) maxdomcount = doms.getMaxScore();
try {
decodedEntries.put(WordReferenceVars.poison);
} catch (InterruptedException e) {}
}

public BlockingQueue<WordReferenceVars> decoded() {
return this.decodedEntries;
return this.decodedEntries;
}
}

/**
 * Starts a background Normalizer thread over the given container and returns
 * its output queue. The queue is terminated by WordReferenceVars.poison, which
 * is guaranteed to arrive even if normalization fails.
 *
 * @param container the word references to normalize; must not be null
 * @return queue of decoded entries, terminated by the poison pill
 */
public BlockingQueue<WordReferenceVars> normalizeWith(final ReferenceContainer<WordReference> container) {
    Normalizer n = new Normalizer(container);
    n.start();
    return n.decoded();
}

/**
 * Computes a relative authority score for the domain of the given URL hash:
 * the domain's collected reference count, shifted left by 8 bits for integer
 * precision, divided by (1 + maxdomcount) so the result stays below 256.
 *
 * NOTE(review): assumes the domain part of the hash starts at index 6 —
 * consistent with Normalizer.run(); confirm against the yacyURL hash layout.
 *
 * @param urlHash the URL hash whose domain is scored; length must exceed 6
 * @return scaled authority score in roughly [0, 255]
 */
public int authority(final String urlHash) {
    return (doms.getScore(urlHash.substring(6)) << 8) / (1 + this.maxdomcount);
}

public long cardinal(final WordReferenceVars t) {
Expand Down
5 changes: 4 additions & 1 deletion source/de/anomic/search/RankingProcess.java
Expand Up @@ -39,6 +39,7 @@
import java.util.TreeSet;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

import de.anomic.document.Condenser;
import de.anomic.document.Word;
Expand Down Expand Up @@ -184,7 +185,9 @@ public void add(final ReferenceContainer<WordReference> index, final boolean loc

// apply all constraints
try {
while ((iEntry = decodedEntries.take()) != WordReferenceVars.poison) {
while (true) {
iEntry = decodedEntries.poll(1, TimeUnit.SECONDS);
if (iEntry == null || iEntry == WordReferenceVars.poison) break;
assert (iEntry.metadataHash().length() == index.row().primaryKeyLength);
//if (iEntry.urlHash().length() != index.row().primaryKeyLength) continue;

Expand Down

0 comments on commit 1171a72

Please sign in to comment.