cleanup
- removed 'deleteComplete' flag; this was used especially for WORDS indexes
- shifted methods from plasmaSwitchboard to plasmaWordIndex

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@3051 6c8d7289-2bf4-0310-a012-ef5d649a1542
orbiter committed Dec 6, 2006
1 parent fbe1ee4 commit 9a85f5a
Showing 21 changed files with 125 additions and 156 deletions.
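Taken together, the changes described in the commit message amount to the call-site migration sketched below. This fragment is illustrative only and not part of the commit; the names sb, query, delHash, wordHash and urlHash are borrowed from the surrounding diffs.

    // hypothetical migration sketch (not taken verbatim from any changed file)

    // before: reference removal lived on plasmaSwitchboard, URL hash first
    sb.removeReferences(delHash, query);
    // after: it is a plasmaWordIndex method, query first
    sb.wordIndex.removeReferences(query, delHash);

    // before: getContainer/removeEntry carried a deleteIfEmpty/deleteComplete flag
    indexContainer c = sb.wordIndex.getContainer(wordHash, null, true, -1);
    sb.wordIndex.removeEntry(wordHash, urlHash, true);
    // after: the flag is gone; an emptied container is now always dropped
    // (see the indexRAMRI hunks below for the RAM cache behaviour)
    c = sb.wordIndex.getContainer(wordHash, null, -1);
    sb.wordIndex.removeEntry(wordHash, urlHash);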
2 changes: 1 addition & 1 deletion htroot/DetailedSearch.java
@@ -117,7 +117,7 @@ public static serverObjects respond(httpHeader header, serverObjects post, serve
return prop;
}
final String delHash = post.get("deleteref", "");
- sb.removeReferences(delHash, query);
+ sb.wordIndex.removeReferences(query, delHash);
}

// prepare search order
8 changes: 4 additions & 4 deletions htroot/IndexControl_p.java
@@ -155,7 +155,7 @@ public static serverObjects respond(httpHeader header, serverObjects post, serve
if (delurl || delurlref) {
// generate an urlx array
indexContainer index = null;
- index = switchboard.wordIndex.getContainer(keyhash, null, true, -1);
+ index = switchboard.wordIndex.getContainer(keyhash, null, -1);
Iterator en = index.entries();
int i = 0;
urlx = new String[index.size()];
@@ -194,7 +194,7 @@ public static serverObjects respond(httpHeader header, serverObjects post, serve
}
Set urlHashes = new HashSet();
for (int i = 0; i < urlx.length; i++) urlHashes.add(urlx[i]);
- switchboard.wordIndex.removeEntries(keyhash, urlHashes, true);
+ switchboard.wordIndex.removeEntries(keyhash, urlHashes);
// this shall lead to a presentation of the list; so handle that the remaining program
// thinks that it was called for a list presentation
post.remove("keyhashdelete");
@@ -272,7 +272,7 @@ public static serverObjects respond(httpHeader header, serverObjects post, serve
indexContainer index;
String result;
long starttime = System.currentTimeMillis();
- index = switchboard.wordIndex.getContainer(keyhash, null, true, -1);
+ index = switchboard.wordIndex.getContainer(keyhash, null, -1);
// built urlCache
Iterator urlIter = index.entries();
HashMap knownURLs = new HashMap();
@@ -451,7 +451,7 @@ public static serverObjects genUrlList(plasmaSwitchboard switchboard, String key
serverObjects prop = new serverObjects();
indexContainer index = null;
try {
- index = switchboard.wordIndex.getContainer(keyhash, null, true, -1);
+ index = switchboard.wordIndex.getContainer(keyhash, null, -1);

prop.put("genUrlList_keyHash", keyhash);

2 changes: 1 addition & 1 deletion htroot/htdocsdefault/dir.java
@@ -399,7 +399,7 @@ public static void deletePhrase(plasmaSwitchboard switchboard, String urlstring,
Map.Entry entry;
while (words.hasNext()) {
entry = (Map.Entry) words.next();
- switchboard.wordIndex.removeEntry(plasmaCondenser.word2hash((String) entry.getKey()), urlhash, true);
+ switchboard.wordIndex.removeEntry(plasmaCondenser.word2hash((String) entry.getKey()), urlhash);
}
switchboard.wordIndex.loadedURL.remove(urlhash);
} catch (Exception e) {
2 changes: 1 addition & 1 deletion htroot/xml/snippet.java
@@ -53,7 +53,7 @@ public static serverObjects respond(httpHeader header, serverObjects post, serve
} else {
String error = snippet.getError();
if ((remove) && (error.equals("no matching snippet found"))) {
- switchboard.removeReferences(plasmaURL.urlHash(url), query);
+ switchboard.wordIndex.removeReferences(query, plasmaURL.urlHash(url));
}
prop.put("text", error);
}
2 changes: 1 addition & 1 deletion htroot/yacysearch.java
@@ -184,7 +184,7 @@ public static serverObjects respond(httpHeader header, serverObjects post, serve

// delete the index entry locally
final String delHash = post.get("deleteref", ""); // urlhash
- sb.removeReferences(delHash, query);
+ sb.wordIndex.removeReferences(query, delHash);

// make new news message with negative voting
HashMap map = new HashMap();
42 changes: 21 additions & 21 deletions source/de/anomic/index/indexCachedRI.java
@@ -80,7 +80,7 @@ public void flushControl() {
}

public long getUpdateTime(String wordHash) {
- indexContainer entries = getContainer(wordHash, null, false, -1);
+ indexContainer entries = getContainer(wordHash, null, -1);
if (entries == null) return 0;
return entries.updated();
}
@@ -139,25 +139,25 @@ private void flushCache(indexRAMRI ram, int count) {
busyCacheFlush = false;
}

- public indexContainer getContainer(String wordHash, Set urlselection, boolean deleteIfEmpty, long maxTime) {
+ public indexContainer getContainer(String wordHash, Set urlselection, long maxTime) {
// get from cache
- indexContainer container = riExtern.getContainer(wordHash, urlselection, true, maxTime);
+ indexContainer container = riExtern.getContainer(wordHash, urlselection, maxTime);
if (container == null) {
- container = riIntern.getContainer(wordHash, urlselection, true, maxTime);
+ container = riIntern.getContainer(wordHash, urlselection, maxTime);
} else {
- container.add(riIntern.getContainer(wordHash, urlselection, true, maxTime), maxTime);
+ container.add(riIntern.getContainer(wordHash, urlselection, maxTime), maxTime);
}

// get from collection index
if (container == null) {
- container = backend.getContainer(wordHash, urlselection, true, (maxTime < 0) ? -1 : maxTime);
+ container = backend.getContainer(wordHash, urlselection, (maxTime < 0) ? -1 : maxTime);
} else {
- container.add(backend.getContainer(wordHash, urlselection, true, (maxTime < 0) ? -1 : maxTime), maxTime);
+ container.add(backend.getContainer(wordHash, urlselection, (maxTime < 0) ? -1 : maxTime), maxTime);
}
return container;
}

- public Map getContainers(Set wordHashes, Set urlselection, boolean deleteIfEmpty, boolean interruptIfEmpty, long maxTime) {
+ public Map getContainers(Set wordHashes, Set urlselection, boolean interruptIfEmpty, long maxTime) {
// return map of wordhash:indexContainer

// retrieve entities that belong to the hashes
@@ -177,7 +177,7 @@ public Map getContainers(Set wordHashes, Set urlselection, boolean deleteIfEmpty
singleHash = (String) i.next();

// retrieve index
- singleContainer = getContainer(singleHash, urlselection, deleteIfEmpty, (maxTime < 0) ? -1 : remaining / (wordHashes.size() - containers.size()));
+ singleContainer = getContainer(singleHash, urlselection, (maxTime < 0) ? -1 : remaining / (wordHashes.size() - containers.size()));

// check result
if (((singleContainer == null) || (singleContainer.size() == 0)) && (interruptIfEmpty)) return new HashMap();
@@ -213,27 +213,27 @@ public indexContainer deleteContainer(String wordHash) {
return c;
}

- public boolean removeEntry(String wordHash, String urlHash, boolean deleteComplete) {
+ public boolean removeEntry(String wordHash, String urlHash) {
boolean removed = false;
- removed = removed | (riIntern.removeEntry(wordHash, urlHash, deleteComplete));
- removed = removed | (riExtern.removeEntry(wordHash, urlHash, deleteComplete));
- removed = removed | (backend.removeEntry(wordHash, urlHash, deleteComplete));
+ removed = removed | (riIntern.removeEntry(wordHash, urlHash));
+ removed = removed | (riExtern.removeEntry(wordHash, urlHash));
+ removed = removed | (backend.removeEntry(wordHash, urlHash));
return removed;
}

- public int removeEntries(String wordHash, Set urlHashes, boolean deleteComplete) {
+ public int removeEntries(String wordHash, Set urlHashes) {
int removed = 0;
- removed += riIntern.removeEntries(wordHash, urlHashes, deleteComplete);
- removed += riExtern.removeEntries(wordHash, urlHashes, deleteComplete);
- removed += backend.removeEntries(wordHash, urlHashes, deleteComplete);
+ removed += riIntern.removeEntries(wordHash, urlHashes);
+ removed += riExtern.removeEntries(wordHash, urlHashes);
+ removed += backend.removeEntries(wordHash, urlHashes);
return removed;
}

- public String removeEntriesExpl(String wordHash, Set urlHashes, boolean deleteComplete) {
+ public String removeEntriesExpl(String wordHash, Set urlHashes) {
String removed = "";
- removed += riIntern.removeEntries(wordHash, urlHashes, deleteComplete) + ", ";
- removed += riExtern.removeEntries(wordHash, urlHashes, deleteComplete) + ", ";
- removed += backend.removeEntries(wordHash, urlHashes, deleteComplete) + ", ";
+ removed += riIntern.removeEntries(wordHash, urlHashes) + ", ";
+ removed += riExtern.removeEntries(wordHash, urlHashes) + ", ";
+ removed += backend.removeEntries(wordHash, urlHashes) + ", ";
return removed;
}

14 changes: 7 additions & 7 deletions source/de/anomic/index/indexCollectionRI.java
@@ -61,7 +61,7 @@ public indexCollectionRI(File path, String filenameStub, long buffersize, long p
}

public long getUpdateTime(String wordHash) {
- indexContainer entries = getContainer(wordHash, null, false, -1);
+ indexContainer entries = getContainer(wordHash, null, -1);
if (entries == null) return 0;
return entries.updated();
}
@@ -113,9 +113,9 @@ public void remove() {

}

- public synchronized indexContainer getContainer(String wordHash, Set urlselection, boolean deleteIfEmpty, long maxtime) {
+ public synchronized indexContainer getContainer(String wordHash, Set urlselection, long maxtime) {
try {
- kelondroRowSet collection = collectionIndex.get(wordHash.getBytes(), deleteIfEmpty);
+ kelondroRowSet collection = collectionIndex.get(wordHash.getBytes());
if (collection != null) collection.select(urlselection);
if ((collection == null) || (collection.size() == 0)) return null;
return new indexContainer(wordHash, collection);
@@ -134,15 +134,15 @@ public synchronized indexContainer deleteContainer(String wordHash) {
}
}

- public synchronized boolean removeEntry(String wordHash, String urlHash, boolean deleteComplete) {
+ public synchronized boolean removeEntry(String wordHash, String urlHash) {
HashSet hs = new HashSet();
hs.add(urlHash.getBytes());
- return removeEntries(wordHash, hs, deleteComplete) == 1;
+ return removeEntries(wordHash, hs) == 1;
}

- public synchronized int removeEntries(String wordHash, Set urlHashes, boolean deleteComplete) {
+ public synchronized int removeEntries(String wordHash, Set urlHashes) {
try {
- return collectionIndex.remove(wordHash.getBytes(), urlHashes, deleteComplete);
+ return collectionIndex.remove(wordHash.getBytes(), urlHashes);
} catch (kelondroOutOfLimitsException e) {
e.printStackTrace();
return 0;
4 changes: 2 additions & 2 deletions source/de/anomic/index/indexContainer.java
@@ -145,12 +145,12 @@ public indexRWIEntry remove(String urlHash) {
return new indexRWIEntryNew(entry);
}

- public boolean removeEntry(String wordHash, String urlHash, boolean deleteComplete) {
+ public boolean removeEntry(String wordHash, String urlHash) {
if (!wordHash.equals(this.wordHash)) return false;
return remove(urlHash) != null;
}

- public int removeEntries(String wordHash, Set urlHashes, boolean deleteComplete) {
+ public int removeEntries(String wordHash, Set urlHashes) {
if (!wordHash.equals(this.wordHash)) return 0;
int count = 0;
Iterator i = urlHashes.iterator();
16 changes: 8 additions & 8 deletions source/de/anomic/index/indexRAMRI.java
@@ -98,7 +98,7 @@ public indexRAMRI(File databaseRoot, kelondroRow payloadrow, int wCacheReference


public synchronized long getUpdateTime(String wordHash) {
- indexContainer entries = getContainer(wordHash, null, false, -1);
+ indexContainer entries = getContainer(wordHash, null, -1);
if (entries == null) return 0;
return entries.updated();
}
@@ -334,7 +334,7 @@ private long longEmit(int intTime) {
return (((long) intTime) * (long) 1000) + initTime;
}

- public synchronized indexContainer getContainer(String wordHash, Set urlselection, boolean deleteIfEmpty, long maxtime_dummy) {
+ public synchronized indexContainer getContainer(String wordHash, Set urlselection, long maxtime_dummy) {

// retrieve container
indexContainer container = (indexContainer) cache.get(wordHash);
@@ -359,11 +359,11 @@ public synchronized indexContainer deleteContainer(String wordHash) {
return container;
}

- public synchronized boolean removeEntry(String wordHash, String urlHash, boolean deleteComplete) {
+ public synchronized boolean removeEntry(String wordHash, String urlHash) {
indexContainer c = (indexContainer) cache.get(wordHash);
- if ((c != null) && (c.removeEntry(wordHash, urlHash, deleteComplete))) {
+ if ((c != null) && (c.removeEntry(wordHash, urlHash))) {
// removal successful
- if ((c.size() == 0) && (deleteComplete)) {
+ if (c.size() == 0) {
deleteContainer(wordHash);
} else {
cache.put(wordHash, c);
@@ -375,13 +375,13 @@ public synchronized boolean removeEntry(String wordHash, String urlHash, boolean
return false;
}

- public synchronized int removeEntries(String wordHash, Set urlHashes, boolean deleteComplete) {
+ public synchronized int removeEntries(String wordHash, Set urlHashes) {
if (urlHashes.size() == 0) return 0;
indexContainer c = (indexContainer) cache.get(wordHash);
int count;
- if ((c != null) && ((count = c.removeEntries(wordHash, urlHashes, deleteComplete)) > 0)) {
+ if ((c != null) && ((count = c.removeEntries(wordHash, urlHashes)) > 0)) {
// removal successful
- if ((c.size() == 0) && (deleteComplete)) {
+ if (c.size() == 0) {
deleteContainer(wordHash);
} else {
cache.put(wordHash, c);
6 changes: 3 additions & 3 deletions source/de/anomic/index/indexRI.java
@@ -39,11 +39,11 @@ public interface indexRI {

public long getUpdateTime(String wordHash);
public int indexSize(String wordHash);
- public indexContainer getContainer(String wordHash, Set urlselection, boolean deleteIfEmpty, long maxtime);
+ public indexContainer getContainer(String wordHash, Set urlselection, long maxtime);
public indexContainer deleteContainer(String wordHash);

- public boolean removeEntry(String wordHash, String urlHash, boolean deleteComplete);
- public int removeEntries(String wordHash, Set urlHashes, boolean deleteComplete);
+ public boolean removeEntry(String wordHash, String urlHash);
+ public int removeEntries(String wordHash, Set urlHashes);
public void addEntry(String wordHash, indexRWIEntry entry, long updateTime, boolean dhtCase);
public void addEntries(indexContainer newEntries, long creationTime, boolean dhtCase);

4 changes: 2 additions & 2 deletions source/de/anomic/kelondro/kelondroCollectionIndex.java
@@ -262,7 +262,7 @@ public synchronized void merge(byte[] key, kelondroRowCollection collection) thr
putmergeremove(key, collection, true, null);
}

- public synchronized int remove(byte[] key, Set removekeys, boolean deletecomplete) throws IOException, kelondroOutOfLimitsException {
+ public synchronized int remove(byte[] key, Set removekeys) throws IOException, kelondroOutOfLimitsException {
return putmergeremove(key, null, false, removekeys);
}

@@ -406,7 +406,7 @@ public synchronized int indexSize(byte[] key) throws IOException {
return (int) indexrow.getColLong(idx_col_chunkcount);
}

- public synchronized kelondroRowSet get(byte[] key, boolean deleteIfEmpty) throws IOException {
+ public synchronized kelondroRowSet get(byte[] key) throws IOException {
// find an entry, if one exists
kelondroRow.Entry indexrow = index.get(key);
if (indexrow == null) return null;
13 changes: 5 additions & 8 deletions source/de/anomic/plasma/plasmaDHTChunk.java
@@ -71,7 +71,6 @@ public class plasmaDHTChunk {

private plasmaWordIndex wordIndex;
private serverLog log;
- private plasmaCrawlLURL lurls;

private int status = chunkStatus_UNDEFINED;
private String startPointHash;
@@ -124,11 +123,10 @@ public int getStatus() {
return this.status;
}

- public plasmaDHTChunk(serverLog log, plasmaWordIndex wordIndex, plasmaCrawlLURL lurls, int minCount, int maxCount, int maxtime) {
+ public plasmaDHTChunk(serverLog log, plasmaWordIndex wordIndex, int minCount, int maxCount, int maxtime) {
try {
this.log = log;
this.wordIndex = wordIndex;
- this.lurls = lurls;
this.startPointHash = selectTransferStart();
log.logFine("Selected hash " + this.startPointHash + " as start point for index distribution, distance = " + yacyDHTAction.dhtDistance(yacyCore.seedDB.mySeed.hash, this.startPointHash));
selectTransferContainers(this.startPointHash, minCount, maxCount, maxtime);
@@ -144,11 +142,10 @@ public plasmaDHTChunk(serverLog log, plasmaWordIndex wordIndex, plasmaCrawlLURL
}
}

- public plasmaDHTChunk(serverLog log, plasmaWordIndex wordIndex, plasmaCrawlLURL lurls, int minCount, int maxCount, int maxtime, String startHash) {
+ public plasmaDHTChunk(serverLog log, plasmaWordIndex wordIndex, int minCount, int maxCount, int maxtime, String startHash) {
try {
this.log = log;
this.wordIndex = wordIndex;
- this.lurls = lurls;
log.logFine("Demanded hash " + startHash + " as start point for index distribution, distance = " + yacyDHTAction.dhtDistance(yacyCore.seedDB.mySeed.hash, this.startPointHash));
selectTransferContainers(startHash, minCount, maxCount, maxtime);

@@ -233,12 +230,12 @@ private int selectTransferContainersResource(String hash, boolean ram, int maxco
urlIter.remove();
continue;
}
- lurl = lurls.load(iEntry.urlHash(), iEntry);
+ lurl = wordIndex.loadedURL.load(iEntry.urlHash(), iEntry);
if ((lurl == null) || (lurl.comp().url() == null)) {
//yacyCore.log.logFine("DEBUG selectTransferContainersResource: not-bound url hash '" + iEntry.urlHash() + "' for word hash " + container.getWordHash());
notBoundCounter++;
urlIter.remove();
- wordIndex.removeEntry(container.getWordHash(), iEntry.urlHash(), true);
+ wordIndex.removeEntry(container.getWordHash(), iEntry.urlHash());
} else {
urlCache.put(iEntry.urlHash(), lurl);
//yacyCore.log.logFine("DEBUG selectTransferContainersResource: added url hash '" + iEntry.urlHash() + "' to urlCache for word hash " + container.getWordHash());
@@ -302,7 +299,7 @@ public synchronized String deleteTransferIndexes() {
urlHashes.add(iEntry.urlHash());
}
String wordHash = indexContainers[i].getWordHash();
- count = wordIndex.removeEntriesExpl(this.indexContainers[i].getWordHash(), urlHashes, true);
+ count = wordIndex.removeEntriesExpl(this.indexContainers[i].getWordHash(), urlHashes);
if (log.isFine())
log.logFine("Deleted partial index (" + c + " URLs) for word " + wordHash + "; " + this.wordIndex.indexSize(wordHash) + " entries left");
this.indexContainers[i] = null;
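The constructor change above implies a matching call-site update; a hypothetical example (the numeric arguments are invented for illustration, only the dropped plasmaCrawlLURL parameter comes from this commit):

    // hypothetical caller: the plasmaCrawlLURL argument is no longer passed,
    // since the chunk now reaches the loaded-URL database via wordIndex.loadedURL
    // before
    plasmaDHTChunk chunk = new plasmaDHTChunk(log, wordIndex, lurls, 30, 1000, 10000);
    // after
    chunk = new plasmaDHTChunk(log, wordIndex, 30, 1000, 10000);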
