Use @Schedule for the dataset export job. Relates to #5345.
Removes the timer handling code for the export of Datasets and OAISets from
DataverseTimerServiceBean and uses the non-persistent @Schedule annotation instead.
Checks are in place so that the export jobs run only on the dedicated timer server.
poikilotherm committed Dec 17, 2018
1 parent f62bddd commit a7cc7b8
Showing 3 changed files with 27 additions and 50 deletions.
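Before the per-file diffs, here is a condensed sketch of the pattern the description above refers to: a container-managed, non-persistent @Schedule timer replaces the programmatically registered export timer, and a timer-server guard keeps the job from running on every node. The class below is illustrative only; the names and the SystemConfig import are taken from, or assumed to match, the Dataverse code shown in the diff, and it is not part of the commit itself.

```java
import javax.ejb.EJB;
import javax.ejb.Lock;
import javax.ejb.LockType;
import javax.ejb.Schedule;
import javax.ejb.Stateless;

import edu.harvard.iq.dataverse.util.SystemConfig; // assumed package for Dataverse's SystemConfig

@Stateless
public class ScheduledExportSketch {

    @EJB
    SystemConfig systemConfig;

    // The container creates this timer automatically on every application start.
    // persistent = false keeps it out of the timer store, so there is nothing
    // to register or clean up in DataverseTimerServiceBean anymore.
    @Lock(LockType.READ)
    @Schedule(hour = "2", persistent = false)
    public void exportAll() {
        // Every node fires its own non-persistent timer, so only the node
        // configured as the dedicated timer server actually does the work.
        if (!systemConfig.isTimerServer()) {
            return;
        }
        exportAllDatasets(false);
    }

    void exportAllDatasets(boolean forceReExport) {
        // the real export logic lives in DatasetServiceBean
    }
}
```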
18 changes: 11 additions & 7 deletions src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -31,12 +31,7 @@
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.Asynchronous;
import javax.ejb.EJB;
import javax.ejb.EJBException;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.ejb.*;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
@@ -576,8 +571,17 @@ public void exportAllAsync() {
exportAllDatasets(false);
}

/**
* Scheduled job that triggers the export of all local & published datasets,
* but only on the node that is configured as the dedicated timer server.
*/
@Lock(LockType.READ)
@Schedule(hour = "2", persistent = false)
public void exportAll() {
exportAllDatasets(false);
if (systemConfig.isTimerServer()) {
logger.info("DatasetService: Running a scheduled export job.");
exportAllDatasets(false);
}
}

public void exportAllDatasets(boolean forceReExport) {
OAISetServiceBean.java
@@ -16,11 +16,7 @@
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.Asynchronous;
import javax.ejb.EJB;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.ejb.*;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
@@ -185,9 +181,23 @@ public void exportOaiSet(OAISet oaiSet, Logger exportLogger) {
//}
managedSet.setUpdateInProgress(false);

}
}

/**
* Scheduled export of all local & published datasets for harvesting via the OAI interface.
* Runs only on the node configured as the dedicated timer server.
*/
@Lock(LockType.READ)
@Schedule(hour = "2", persistent = false)
public void exportAllSets() {
// In case this node is not the timer server, skip silently.
if (!systemConfig.isTimerServer()) {
return;
}
logger.info("OAISetService: Running a scheduled export job.");

// TODO: this should be refactored to handle container usage, where these logs should not be
// saved locally, but get streamed to a handler like STDOUT.
String logTimestamp = logFormatter.format(new Date());
Logger exportLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.OAISetServiceBean." + "UpdateAllSets." + logTimestamp);
String logFileName = "../logs" + File.separator + "oaiSetsUpdate_" + logTimestamp + ".log";
DataverseTimerServiceBean.java
@@ -63,10 +63,6 @@ public class DataverseTimerServiceBean implements Serializable {
@EJB
AuthenticationServiceBean authSvc;
@EJB
DatasetServiceBean datasetService;
@EJB
OAISetServiceBean oaiSetService;
@EJB
SystemConfig systemConfig;


@@ -84,8 +80,6 @@ public void init() {
removeAllTimers();
// create mother timer:
createMotherTimer();
// And the export timer (there is only one)
createExportTimer();

} else {
logger.info("Skipping timer server init (I am not the dedicated timer server)");
@@ -162,18 +156,6 @@ public void handleTimeout(javax.ejb.Timer timer) {
//dataverseService.setHarvestResult(info.getHarvestingDataverseId(), harvesterService.HARVEST_RESULT_FAILED);
//mailService.sendHarvestErrorNotification(dataverseService.find().getSystemEmail(), dataverseService.find().getName());
logException(e, logger);
}
} else if (timer.getInfo() instanceof ExportTimerInfo) {
try {
ExportTimerInfo info = (ExportTimerInfo) timer.getInfo();
logger.info("Timer Service: Running a scheduled export job.");

// try to export all unexported datasets:
datasetService.exportAll();
// and update all oai sets:
oaiSetService.exportAllSets();
} catch (Throwable e) {
logException(e, logger);
}
}

@@ -285,25 +267,6 @@ public void removeHarvestTimer(HarvestingClient harvestingClient) {
}
}

public void createExportTimer() {
ExportTimerInfo info = new ExportTimerInfo();
Calendar initExpiration = Calendar.getInstance();
long intervalDuration = 24 * 60 * 60 * 1000; // every day
initExpiration.set(Calendar.MINUTE, 0);
initExpiration.set(Calendar.SECOND, 0);
initExpiration.set(Calendar.HOUR_OF_DAY, 2); // 2AM, fixed.


Date initExpirationDate = initExpiration.getTime();
Date currTime = new Date();
if (initExpirationDate.before(currTime)) {
initExpirationDate.setTime(initExpiration.getTimeInMillis() + intervalDuration);
}

logger.info("Setting the Export Timer, initial expiration: " + initExpirationDate);
createTimer(initExpirationDate, intervalDuration, info);
}

/* Utility methods: */
private void logException(Throwable e, Logger logger) {


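A note on the persistent = false choice, for context: the removed createExportTimer() registered a programmatic EJB timer via TimerService.createTimer(), which is persistent by default — the container stores it and it survives restarts, which is why DataverseTimerServiceBean wipes and recreates its timers in init(). A non-persistent @Schedule timer is simply recreated by the container on every application start, so no registration or cleanup code is needed. The contrast below is a purely illustrative sketch under those assumptions, not code from this commit.

```java
import java.io.Serializable;
import java.util.Date;
import javax.annotation.Resource;
import javax.ejb.Schedule;
import javax.ejb.Singleton;
import javax.ejb.Startup;
import javax.ejb.Timeout;
import javax.ejb.Timer;
import javax.ejb.TimerService;

@Singleton
@Startup
public class TimerStyleSketch {

    @Resource
    TimerService timerService;

    // Old style (what the removed createExportTimer() did): a programmatic,
    // persistent timer. It lives in the container's timer store, survives
    // restarts, and needs explicit removal when the schedule changes.
    public void createLegacyDailyTimer(Date firstRun, Serializable info) {
        long oneDay = 24 * 60 * 60 * 1000L;
        timerService.createTimer(firstRun, oneDay, info); // persistent by default
    }

    @Timeout
    public void handleTimeout(Timer timer) {
        // Programmatic timers call back here; DataverseTimerServiceBean still
        // uses this mechanism for its remaining harvesting timers.
    }

    // New style: an automatic, non-persistent timer declared in code. The
    // container recreates it on every start, so there is nothing to store or clean up.
    @Schedule(hour = "2", persistent = false)
    public void runDailyAtTwo() {
        // scheduled work goes here (guarded by an isTimerServer() check in Dataverse)
    }
}
```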