Commit
#11 fixed FileListDataStore.
marevol committed Oct 5, 2013
1 parent 88846fa commit 3277dbe
Showing 6 changed files with 60 additions and 9 deletions.
2 changes: 2 additions & 0 deletions src/main/java/jp/sf/fess/Constants.java
@@ -236,6 +236,8 @@ public class Constants extends CoreLibConstants {

public static final String DATA_INDEX_SIZE = "DataIndexSize";

public static final String SESSION_ID = "sessionId";

public static final String INDEXING_TARGET = "indexingTarget";

public static final String DIGEST_PREFIX = "...";
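The two constants above replace string literals that were previously duplicated across the data stores and helpers. A minimal self-contained sketch of why that matters (the session id value is made up):

    import java.util.HashMap;
    import java.util.Map;

    public class ConstantsSketch {
        public static final String SESSION_ID = "sessionId";

        public static void main(final String[] args) {
            final Map<String, String> initParamMap = new HashMap<String, String>();
            initParamMap.put(SESSION_ID, "20131005120000");
            // With a bare literal, a typo such as "sessionid" compiles fine and
            // silently returns null; the shared constant turns the typo into a
            // compile error.
            System.out.println(initParamMap.get(SESSION_ID));
        }
    }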
@@ -61,7 +61,7 @@ public void store(final DataCrawlingConfig config,
// default values
final Map<String, Object> defaultDataMap = new HashMap<String, Object>();
// segment
- defaultDataMap.put("segment", initParamMap.get("sessionId"));
+ defaultDataMap.put("segment", initParamMap.get(Constants.SESSION_ID));
// tstamp
defaultDataMap.put("tstamp", "NOW");
// boost
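This hunk only swaps the literal for Constants.SESSION_ID, but the surrounding pattern — a defaultDataMap whose entries seed every indexed document — is worth illustrating. A minimal sketch, assuming per-document values should override the defaults (withDefaults is a hypothetical helper, not Fess code):

    import java.util.HashMap;
    import java.util.Map;

    public class DefaultDataMapSketch {
        // Copy the shared defaults first, then let per-document values win.
        static Map<String, Object> withDefaults(final Map<String, Object> defaults,
                final Map<String, Object> doc) {
            final Map<String, Object> merged = new HashMap<String, Object>(defaults);
            merged.putAll(doc);
            return merged;
        }

        public static void main(final String[] args) {
            final Map<String, Object> defaults = new HashMap<String, Object>();
            defaults.put("segment", "20131005120000"); // session id as segment
            defaults.put("tstamp", "NOW");
            defaults.put("boost", "1.0");

            final Map<String, Object> doc = new HashMap<String, Object>();
            doc.put("url", "file:/tmp/sample.txt");
            System.out.println(withDefaults(defaults, doc));
        }
    }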
22 changes: 17 additions & 5 deletions src/main/java/jp/sf/fess/ds/impl/FileListDataStoreImpl.java
@@ -101,6 +101,9 @@ public class FileListDataStoreImpl extends CsvDataStoreImpl {
public Map<String, String> parentEncodingMap = Collections
        .synchronizedMap(new LruHashMap<String, String>(1000));

+ public String[] ignoreFieldNames = new String[] {
+         Constants.INDEXING_TARGET, Constants.SESSION_ID };

@Override
protected boolean isCsvFile(final File parentFile, final String filename) {
if (super.isCsvFile(parentFile, filename)) {
@@ -233,9 +236,12 @@ protected void storeData(final IndexUpdateCallback callback,
headerName = paramMap.get(S2ROBOT_WEB_HEADER_PREFIX + count
        + ".name");
}
- initParamMap.put(HcHttpClient.REQUERT_HEADERS_PROPERTY, rhList
-         .toArray(new org.seasar.robot.client.http.RequestHeader[rhList
-                 .size()]));
+ if (!rhList.isEmpty()) {
+     initParamMap
+             .put(HcHttpClient.REQUERT_HEADERS_PROPERTY,
+                     rhList.toArray(new org.seasar.robot.client.http.RequestHeader[rhList
+                             .size()]));
+ }

// file auth
final String fileAuthStr = paramMap.get(S2ROBOT_FILE_AUTH);
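The new isEmpty() guard keeps an empty header array from being pushed into the crawler's init parameters. A simplified sketch of the same guard, with a plain String[] standing in for org.seasar.robot.client.http.RequestHeader and a made-up property key:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class RequestHeaderSketch {
        static final String REQUEST_HEADERS_PROPERTY = "requestHeaders"; // stand-in key

        public static void main(final String[] args) {
            final List<String[]> rhList = new ArrayList<String[]>();
            // rhList.add(new String[] { "Authorization", "Basic ..." });

            final Map<String, Object> initParamMap = new HashMap<String, Object>();
            // Register the property only when headers were actually configured,
            // so a configless crawl does not store a useless empty array.
            if (!rhList.isEmpty()) {
                initParamMap.put(REQUEST_HEADERS_PROPERTY,
                        rhList.toArray(new String[rhList.size()][]));
            }
            System.out.println(initParamMap);
        }
    }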
@@ -337,7 +343,7 @@ protected FileListIndexUpdateCallback(

@Override
public boolean store(final Map<String, Object> dataMap) {
-     final Object eventType = dataMap.get(eventTypeField);
+     final Object eventType = dataMap.remove(eventTypeField);

    if (createEventName.equals(eventType)
            || modifyEventName.equals(eventType)) {
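Switching get() to remove() reads the event type and strips it from the map in one step, so the field can no longer leak into the indexed document. A condensed sketch of that dispatch (the field name and event names are illustrative, and both handlers are stubs):

    import java.util.HashMap;
    import java.util.Map;

    public class EventDispatchSketch {
        static final String EVENT_TYPE_FIELD = "event_type"; // assumed default

        static boolean store(final Map<String, Object> dataMap) {
            // remove() both fetches and deletes the control field.
            final Object eventType = dataMap.remove(EVENT_TYPE_FIELD);
            if ("create".equals(eventType) || "modify".equals(eventType)) {
                return addDocument(dataMap);
            } else if ("delete".equals(eventType)) {
                return deleteDocument(dataMap);
            }
            return false; // unknown or missing event type: skip the row
        }

        static boolean addDocument(final Map<String, Object> dataMap) {
            System.out.println("index: " + dataMap);
            return true;
        }

        static boolean deleteDocument(final Map<String, Object> dataMap) {
            System.out.println("delete: " + dataMap);
            return true;
        }

        public static void main(final String[] args) {
            final Map<String, Object> row = new HashMap<String, Object>();
            row.put(EVENT_TYPE_FIELD, "create");
            row.put("url", "file:/tmp/sample.txt");
            store(row);
        }
    }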
@@ -375,7 +381,8 @@ protected boolean addDocument(final Map<String, Object> dataMap) {
final ResponseData responseData = client.doGet(url);
responseData.setExecutionTime(System.currentTimeMillis()
        - startTime);
- responseData.setSessionId((String) dataMap.get("sessionId"));
+ responseData.setSessionId((String) dataMap
+         .get(Constants.SESSION_ID));

final RuleManager ruleManager = SingletonS2Container
        .getComponent(RuleManager.class);
@@ -406,6 +413,11 @@ protected boolean addDocument(final Map<String, Object> dataMap) {
    }
}

+ // remove
+ for (final String fieldName : ignoreFieldNames) {
+     dataMap.remove(fieldName);
+ }

return indexUpdateCallback.store(dataMap);
} else {
logger.warn("The response processor is not DefaultResponseProcessor. responseProcessor: "
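Together with the ignoreFieldNames array declared earlier in this file, the loop above strips crawl-control fields just before the document reaches the index callback. A self-contained sketch (field values invented):

    import java.util.HashMap;
    import java.util.Map;

    public class IgnoreFieldsSketch {
        static final String[] IGNORE_FIELD_NAMES = { "indexingTarget", "sessionId" };

        public static void main(final String[] args) {
            final Map<String, Object> dataMap = new HashMap<String, Object>();
            dataMap.put("url", "file:/tmp/sample.txt");
            dataMap.put("sessionId", "20131005120000");
            dataMap.put("indexingTarget", "true");

            // Crawl-control fields drive processing but must not become
            // document fields, so drop them just before storing.
            for (final String fieldName : IGNORE_FIELD_NAMES) {
                dataMap.remove(fieldName);
            }
            System.out.println(dataMap); // {url=file:/tmp/sample.txt}
        }
    }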
2 changes: 1 addition & 1 deletion src/main/java/jp/sf/fess/helper/DataIndexHelper.java
@@ -129,7 +129,7 @@ protected void crawl(final String sessionId, final SolrGroup solrGroup,
        dataCrawlingConfig);
sessionIdList.add(sid);

- initParamMap.put("sessionId", sessionId);
+ initParamMap.put(Constants.SESSION_ID, sessionId);
initParamMap.put("crawlingSessionId", sid);

final DataCrawlingThread dataCrawlingThread = new DataCrawlingThread(
6 changes: 6 additions & 0 deletions src/main/java/jp/sf/fess/helper/SystemHelper.java
@@ -294,6 +294,12 @@ public void executeCrawler(final String sessionId,
logger.info("Crawler: Exit Code=" + exitValue
        + " - Crawler Process Output:\n" + it.getOutput());
}
+ if (exitValue != 0) {
+     throw new FessSystemException("Exit code is " + exitValue
+             + "\nOutput:\n" + it.getOutput());
+ }
+ } catch (final FessSystemException e) {
+     throw e;
} catch (final InterruptedException e) {
logger.warn("Crawler Process interrupted.");
} catch (final Exception e) {
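Previously a crawler process that died with a non-zero status was only logged; the new check turns it into an exception so the job is recorded as failed. A minimal sketch, with ProcessBuilder and RuntimeException standing in for Fess's process wrapper and FessSystemException:

    public class ExitCodeSketch {
        public static void main(final String[] args) throws Exception {
            // "false" is a Unix command that always exits with status 1.
            final Process proc = new ProcessBuilder("false").start();
            final int exitValue = proc.waitFor();
            if (exitValue != 0) {
                // Fail loudly instead of logging and returning success.
                throw new RuntimeException("Exit code is " + exitValue);
            }
        }
    }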
35 changes: 33 additions & 2 deletions src/main/java/jp/sf/fess/job/CrawlJob.java
@@ -20,6 +20,7 @@
import java.util.Date;

import jp.sf.fess.Constants;
+ import jp.sf.fess.FessSystemException;
import jp.sf.fess.helper.CrawlingSessionHelper;
import jp.sf.fess.helper.SystemHelper;
import jp.sf.fess.job.JobExecutor.ShutdownListener;
@@ -56,6 +57,35 @@ public String execute(final JobExecutor jobExecutor,
        final String operation) {
    final StringBuilder resultBuf = new StringBuilder();

+ resultBuf.append("Session Id: ").append(sessionId).append("\n");
+ resultBuf.append("Web Config Id:");
+ if (webConfigIds == null) {
+     resultBuf.append(" ALL\n");
+ } else {
+     for (final String id : webConfigIds) {
+         resultBuf.append(' ').append(id);
+     }
+     resultBuf.append('\n');
+ }
+ resultBuf.append("File Config Id:");
+ if (fileConfigIds == null) {
+     resultBuf.append(" ALL\n");
+ } else {
+     for (final String id : fileConfigIds) {
+         resultBuf.append(' ').append(id);
+     }
+     resultBuf.append('\n');
+ }
+ resultBuf.append("Data Config Id:");
+ if (dataConfigIds == null) {
+     resultBuf.append(" ALL\n");
+ } else {
+     for (final String id : dataConfigIds) {
+         resultBuf.append(' ').append(id);
+     }
+     resultBuf.append('\n');
+ }

if (jobExecutor != null) {
    jobExecutor.addShutdownListener(new ShutdownListener() {
        @Override
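The three id blocks above are intentionally parallel: a null array means "crawl all configs of that type". They could be folded into one helper; a sketch of that refactoring (appendIds is hypothetical and not part of the commit):

    public class ResultBufSketch {
        static void appendIds(final StringBuilder buf, final String label,
                final String[] ids) {
            buf.append(label).append(':');
            if (ids == null) {
                buf.append(" ALL\n"); // null means "crawl everything"
            } else {
                for (final String id : ids) {
                    buf.append(' ').append(id);
                }
                buf.append('\n');
            }
        }

        public static void main(final String[] args) {
            final StringBuilder resultBuf = new StringBuilder();
            appendIds(resultBuf, "Web Config Id", new String[] { "1", "2" });
            appendIds(resultBuf, "File Config Id", null);
            System.out.print(resultBuf);
        }
    }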
@@ -74,9 +104,10 @@ public void onShutdown() {
SingletonS2Container.getComponent(SystemHelper.class)
        .executeCrawler(sessionId, webConfigIds, fileConfigIds,
                dataConfigIds, operation);
+ } catch (final FessSystemException e) {
+     throw e;
} catch (final Exception e) {
-     logger.error("Failed to execute a crawl job.", e);
-     resultBuf.append(e.getMessage());
+     throw new FessSystemException("Failed to execute a crawl job.", e);
}

return resultBuf.toString();
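The catch ordering here matters: the narrow FessSystemException clause rethrows as-is so the exit-code failure from SystemHelper is not double-wrapped, while the broad clause wraps everything else. A compact sketch of the pattern (RuntimeException stands in for FessSystemException, runCrawler is a stub):

    public class RethrowSketch {
        static void runCrawler() {
            throw new RuntimeException("Exit code is 1"); // simulated failure
        }

        public static void main(final String[] args) {
            try {
                runCrawler();
            } catch (final RuntimeException e) {
                throw e; // already meaningful: pass through untouched
            } catch (final Exception e) {
                // anything unexpected is wrapped so callers see one failure type
                throw new RuntimeException("Failed to execute a crawl job.", e);
            }
        }
    }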
