- added a new Crawler Balancer: HostBalancer and HostQueues:
This organizes all URLs to be loaded into separate queues, one for each
host. Each host further separates its URLs by crawl depth into queues of
their own. The primary rule for URLs taken from any queue is that the
crawl depth is minimal. This produces a crawl depth which is identical to
the click depth. Furthermore, the crawl achieves a much better balancing
over all hosts, which is fair to every host that is in the queue.
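The minimal-depth rule can be pictured with a short sketch (illustrative only; the class and method names below are assumptions, not the actual HostBalancer/HostQueues API). Each host keeps one queue per crawl depth, and the balancer always serves a URL from the smallest depth currently available:

// Sketch of the minimal-depth selection rule described above. Names are
// illustrative; the real HostBalancer additionally balances fairly among
// hosts that share the same minimal depth.
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;

public class MinimalDepthBalancerSketch {

    // one entry per host; each host keeps a separate queue for each crawl depth
    private final Map<String, TreeMap<Integer, Deque<String>>> hostQueues = new ConcurrentHashMap<>();

    // enqueue a URL under its host (host:port key) and crawl depth
    public void push(final String hostport, final int depth, final String url) {
        this.hostQueues
            .computeIfAbsent(hostport, h -> new TreeMap<>())
            .computeIfAbsent(depth, d -> new ArrayDeque<>())
            .add(url);
    }

    // take the next URL from a host whose smallest stored crawl depth is minimal
    public String pop() {
        String bestHost = null;
        int bestDepth = Integer.MAX_VALUE;
        for (final Map.Entry<String, TreeMap<Integer, Deque<String>>> e : this.hostQueues.entrySet()) {
            if (e.getValue().isEmpty()) continue;
            final int depth = e.getValue().firstKey(); // smallest depth stored for this host
            if (depth < bestDepth) { bestDepth = depth; bestHost = e.getKey(); }
        }
        if (bestHost == null) return null;
        final TreeMap<Integer, Deque<String>> depths = this.hostQueues.get(bestHost);
        final Deque<String> queue = depths.get(bestDepth);
        final String url = queue.poll();
        if (queue.isEmpty()) depths.remove(bestDepth); // drop the exhausted depth level
        return url;
    }
}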
For wide crawls, this per-host and per-depth organization creates a very
large number of files in the QUEUES folder: a directory for each host
and, inside that directory, a file for each crawl depth. A crawl with
maxdepth = 4 can create tens of thousands of files. To be able to use
that many file readers, it was necessary to implement a new index data
structure which opens a file only when an access is requested
(OnDemandOpenFileIndex). Using such an on-demand file reader prevents the
number of open file handles from exceeding the system limit, which is
usually about 10,000 open files. Some parts of YaCy had to be adapted to
handle the crawl depth number correctly. The logging and the
IndexCreateQueues servlet had to be adapted to show the crawl queues
differently, because the host name now has the port attached to
distinguish between http, https, and ftp services.
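The on-demand access pattern behind OnDemandOpenFileIndex can be sketched as follows (a simplified illustration under assumed names; the real class wraps a table index rather than a raw record reader):

// Sketch of the on-demand file access idea: the file descriptor only exists
// for the duration of a single read, so thousands of such readers can coexist
// without hitting the per-process limit on open files. Names are illustrative.
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;

public class OnDemandFileReaderSketch {

    private final File file;

    public OnDemandFileReaderSketch(final File file) {
        this.file = file;
    }

    // read one record of the given length at the given offset; the file is
    // opened for this access only and closed again immediately afterwards
    public byte[] read(final long offset, final int length) throws IOException {
        try (RandomAccessFile raf = new RandomAccessFile(this.file, "r")) {
            raf.seek(offset);
            final byte[] record = new byte[length];
            raf.readFully(record);
            return record;
        } // closed here: no file handle stays open between accesses
    }
}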
Orbiter committed Apr 16, 2014
1 parent 075b6f9 commit da86f15
Showing 32 changed files with 1,372 additions and 384 deletions.
1 change: 1 addition & 0 deletions defaults/yacy.logging
@@ -29,6 +29,7 @@ sun.net.www.protocol.http.HttpURLConnection.level = INFO
# Tray
sun.awt.level = OFF
java.awt.level = OFF
TABLE.level = INFO

# List of global handlers
handlers = java.util.logging.FileHandler,\
8 changes: 7 additions & 1 deletion htroot/ConfigPortal.java
@@ -29,6 +29,7 @@
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.Properties;

import net.yacy.cora.document.id.DigestURL;
@@ -99,7 +100,12 @@ public static serverObjects respond(final RequestHeader header, final serverObje

String excludehosts = post.get("search.excludehosts", "");
sb.setConfig("search.excludehosts", excludehosts);
sb.setConfig("search.excludehosth", DigestURL.hosthashes(excludehosts));
try {
sb.setConfig("search.excludehosth", DigestURL.hosthashes(excludehosts));
} catch (MalformedURLException e) {
ConcurrentLog.logException(e);
sb.setConfig("search.excludehosth", "");
}
}
if (post.containsKey("searchpage_default")) {
// load defaults from defaults/yacy.init file
4 changes: 4 additions & 0 deletions htroot/Crawler_p.java
@@ -339,16 +339,20 @@ public static serverObjects respond(final RequestHeader header, final serverObje
Set<String> hosthashes = new HashSet<String>();
for (DigestURL u: rootURLs) hosthashes.add(u.hosthash());
sb.index.fulltext().deleteStaleDomainHashes(hosthashes, deleteageDate);
sb.crawlQueues.removeHosts(hosthashes);
}
} else if (subPath) {
siteFilter = CrawlProfile.subpathFilter(rootURLs);
if (deleteold) {
Set<String> hosthashes = new HashSet<String>();
for (DigestURL u: rootURLs) {
hosthashes.add(u.hosthash());
String basepath = u.toNormalform(true);
if (!basepath.endsWith("/")) {int p = basepath.lastIndexOf("/"); if (p > 0) basepath = basepath.substring(0, p + 1);}
int count = sb.index.fulltext().remove(basepath, deleteageDate);
if (count > 0) ConcurrentLog.info("Crawler_p", "deleted " + count + " documents for host " + u.getHost());
}
sb.crawlQueues.removeHosts(hosthashes);
}
}
if (CrawlProfile.MATCH_ALL_STRING.equals(newcrawlingMustMatch)) {
2 changes: 1 addition & 1 deletion htroot/IndexCreateQueues_p.html
@@ -65,7 +65,7 @@ <h2>'#[queuename]#' Crawl Queue</h2>
<tr class="TableCellDark">
<td>#[hostcount]#</td>
<td>#[hostdelta]#</td>
<td><a href="IndexCreateQueues_p.html?#(embed)#::embed=&#(/embed)#delete=&stack=#[queuename]#&option=1&pattern=.*#[hostname]#.*&urlsPerHost=#[urlsPerHost]#"><img src="env/grafics/trash.gif"></a>&nbsp;#[hostname]#</td>
<td><a href="IndexCreateQueues_p.html?#(embed)#::embed=&#(/embed)#delete=&stack=#[queuename]#&option=1&pattern=.*#[hostname]#.*&urlsPerHost=#[urlsPerHost]#"><img src="env/grafics/trash.gif"></a>&nbsp;#[hostnameport]#</td>
<td colspan="6"></td>
</tr>
#{list}#
75 changes: 50 additions & 25 deletions htroot/IndexCreateQueues_p.java
@@ -1,15 +1,19 @@

import java.net.MalformedURLException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

import net.yacy.cora.document.encoding.ASCII;
import net.yacy.cora.document.id.DigestURL;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.CrawlSwitchboard;
@@ -75,30 +79,45 @@ public static serverObjects respond(@SuppressWarnings("unused") final RequestHea
}
}
} else {
// iterating through the list of URLs
final Iterator<Request> iter = sb.crawlQueues.noticeURL.iterator(stackType);
Request entry;
final List<byte[]> removehashes = new ArrayList<byte[]>();
while (iter.hasNext()) {
if ((entry = iter.next()) == null) continue;
String value = null;

location: switch (option) {
case URL: value = (entry.url() == null) ? null : entry.url().toString(); break location;
case ANCHOR: value = entry.name(); break location;
case DEPTH: value = Integer.toString(entry.depth()); break location;
case INITIATOR:
value = (entry.initiator() == null || entry.initiator().length == 0) ? "proxy" : ASCII.String(entry.initiator());
break location;
case MODIFIED: value = daydate(entry.appdate()); break location;
default: value = null; break location;
int removedByHosts = 0;
if (option == URL && deletepattern.startsWith(".*") && deletepattern.endsWith(".*")) {
// try to delete that using the host name
Set<String> hosthashes = new HashSet<String>();
String hn = deletepattern.substring(2, deletepattern.length() - 2);
try {
hosthashes.add(DigestURL.hosthash(hn, hn.startsWith("ftp") ? 21 : 80));
hosthashes.add(DigestURL.hosthash(hn, 443));
removedByHosts = sb.crawlQueues.removeHosts(hosthashes);
} catch (MalformedURLException e) {
}

if (value != null && compiledPattern.matcher(value).matches()) removehashes.add(entry.url().hash());
}
ConcurrentLog.info("IndexCreateQueues_p", "created a remove list with " + removehashes.size() + " entries for pattern '" + deletepattern + "'");
for (final byte[] b: removehashes) {
sb.crawlQueues.noticeURL.removeByURLHash(b);

if (removedByHosts == 0) {
// iterating through the list of URLs
final Iterator<Request> iter = sb.crawlQueues.noticeURL.iterator(stackType);
Request entry;
final List<byte[]> removehashes = new ArrayList<byte[]>();
while (iter.hasNext()) {
if ((entry = iter.next()) == null) continue;
String value = null;

location: switch (option) {
case URL: value = (entry.url() == null) ? null : entry.url().toString(); break location;
case ANCHOR: value = entry.name(); break location;
case DEPTH: value = Integer.toString(entry.depth()); break location;
case INITIATOR:
value = (entry.initiator() == null || entry.initiator().length == 0) ? "proxy" : ASCII.String(entry.initiator());
break location;
case MODIFIED: value = daydate(entry.appdate()); break location;
default: value = null; break location;
}

if (value != null && compiledPattern.matcher(value).matches()) removehashes.add(entry.url().hash());
}
ConcurrentLog.info("IndexCreateQueues_p", "created a remove list with " + removehashes.size() + " entries for pattern '" + deletepattern + "'");
for (final byte[] b: removehashes) {
sb.crawlQueues.noticeURL.removeByURLHash(b);
}
}
}
} catch (final PatternSyntaxException e) {
@@ -121,13 +140,17 @@ public static serverObjects respond(@SuppressWarnings("unused") final RequestHea

int hc = 0;
for (Map.Entry<String, Integer[]> host: hosts.entrySet()) {
prop.putHTML("crawler_host_" + hc + "_hostname", host.getKey());
String hostnameport = host.getKey();
int p = hostnameport.lastIndexOf(':');
String hostname = p < 0 ? hostnameport : hostnameport.substring(0, p);
prop.putHTML("crawler_host_" + hc + "_hostnameport", hostnameport);
prop.putHTML("crawler_host_" + hc + "_hostname", hostname);
prop.put("crawler_host_" + hc + "_embed", embed ? 1 : 0);
prop.put("crawler_host_" + hc + "_urlsPerHost", urlsPerHost);
prop.putHTML("crawler_host_" + hc + "_queuename", stackType.name());
prop.put("crawler_host_" + hc + "_hostcount", host.getValue()[0]);
prop.put("crawler_host_" + hc + "_hostdelta", host.getValue()[1] == Integer.MIN_VALUE ? "not accessed" : Integer.toString(host.getValue()[1]));
List<Request> domainStackReferences = sb.crawlQueues.noticeURL.getDomainStackReferences(stackType, host.getKey(), urlsPerHost, 10000);
List<Request> domainStackReferences = sb.crawlQueues.noticeURL.getDomainStackReferences(stackType, hostname, urlsPerHost, 10000);

Seed initiator;
String profileHandle;
@@ -138,9 +161,11 @@ public static serverObjects respond(@SuppressWarnings("unused") final RequestHea
initiator = sb.peers.getConnected(request.initiator() == null ? "" : ASCII.String(request.initiator()));
profileHandle = request.profileHandle();
profileEntry = profileHandle == null ? null : sb.crawler.getActive(profileHandle.getBytes());
String depthString = Integer.toString(request.depth());
while (depthString.length() < 4) depthString = "0" + depthString;
prop.putHTML("crawler_host_" + hc + "_list_" + count + "_initiator", ((initiator == null) ? "proxy" : initiator.getName()) );
prop.put("crawler_host_" + hc + "_list_" + count + "_profile", ((profileEntry == null) ? "unknown" : profileEntry.collectionName()));
prop.put("crawler_host_" + hc + "_list_" + count + "_depth", request.depth());
prop.putHTML("crawler_host_" + hc + "_list_" + count + "_depth", depthString);
prop.put("crawler_host_" + hc + "_list_" + count + "_modified", daydate(request.appdate()) );
prop.putHTML("crawler_host_" + hc + "_list_" + count + "_anchor", request.name());
prop.putHTML("crawler_host_" + hc + "_list_" + count + "_url", request.url().toNormalform(true));
23 changes: 12 additions & 11 deletions source/net/yacy/cora/document/id/DigestURL.java
@@ -36,7 +36,6 @@
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.util.ByteArray;
import net.yacy.cora.util.CommonPattern;
import net.yacy.cora.util.ConcurrentLog;

/**
* URI-object providing YaCy-hash computation
@@ -57,18 +56,19 @@ public class DigestURL extends MultiProtocolURL implements Serializable {
/**
* Shortcut, calculate hash for shorted url/hostname
* @param host
* @param port
* @return
*/
public static String hosthash(final String host) {
public static String hosthash(final String host, final int port) throws MalformedURLException {
String h = host;
if (!h.startsWith("http://")) h = "http://" + h;
DigestURL url = null;
try {
url = new DigestURL(h);
} catch (final MalformedURLException e) {
ConcurrentLog.logException(e);
return null;
if (h.indexOf("//") < 0) {
if (port == 80 || port == 8080 || port == 8090) h = "http://" + h;
else if (port == 443) h = "https://" + h;
else if (port == 21 || port == 2121) h = "ftp://" + h;
else if (port > 999) h = "http://" + h + ":" + port;
else h = "http://" + h;
}
DigestURL url = new DigestURL(h);
return (url == null) ? null : ASCII.String(url.hash(), 6, 6);
}

@@ -77,15 +77,16 @@ public static String hosthash(final String host) {
* the list is separated by comma
* @param hostlist
* @return list of host hashes without separation
* @throws MalformedURLException
*/
public static String hosthashes(final String hostlist) {
public static String hosthashes(final String hostlist) throws MalformedURLException {
String[] hs = CommonPattern.COMMA.split(hostlist);
StringBuilder sb = new StringBuilder(hostlist.length());
for (String h: hs) {
if (h == null) continue;
h = h.trim();
if (h.isEmpty()) continue;
h = hosthash(h);
h = hosthash(h, h.startsWith("ftp.") ? 21 : 80);
if (h == null || h.length() != 6) continue;
sb.append(h);
}
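As shown in the hunk above, hosthash now takes a port and uses it to decide which scheme to assume before hashing when the host string carries no scheme of its own. A hedged usage sketch of the changed signature (the wrapper class and main method are illustrative, not part of the commit):

// Illustrative calls to the changed DigestURL.hosthash(host, port) signature;
// the wrapper class and main method are assumptions for demonstration only.
import java.net.MalformedURLException;
import net.yacy.cora.document.id.DigestURL;

public class HostHashExample {
    public static void main(final String[] args) {
        try {
            final String httpHash  = DigestURL.hosthash("example.org", 80);      // hashed as http://example.org
            final String httpsHash = DigestURL.hosthash("example.org", 443);     // hashed as https://example.org
            final String ftpHash   = DigestURL.hosthash("ftp.example.org", 21);  // hashed as ftp://ftp.example.org
            System.out.println(httpHash + " " + httpsHash + " " + ftpHash);
        } catch (final MalformedURLException e) {
            e.printStackTrace();
        }
    }
}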
8 changes: 8 additions & 0 deletions source/net/yacy/crawler/Balancer.java
@@ -25,6 +25,7 @@
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import net.yacy.cora.storage.HandleSet;
import net.yacy.cora.util.SpaceExceededException;
@@ -62,6 +63,13 @@ public interface Balancer {
*/
public int removeAllByProfileHandle(final String profileHandle, final long timeout) throws IOException, SpaceExceededException;

/**
* delete all urls which are stored for given host hashes
* @param hosthashes
* @return number of deleted urls
*/
public int removeAllByHostHashes(final Set<String> hosthashes);

/**
* @param urlHashes, a list of hashes that shall be removed
* @return number of entries that had been removed