some initial work on creating an onSuccess method #4425
ferrys committed Jan 19, 2018
1 parent 7eec754 commit b64a487
Showing 4 changed files with 81 additions and 45 deletions.
19 changes: 16 additions & 3 deletions src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
@@ -35,6 +35,7 @@
import javax.ejb.EJBException;
import javax.ejb.TransactionAttribute;
import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
+import javax.inject.Inject;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.validation.ConstraintViolation;
@@ -248,16 +249,28 @@ public <R> R submit(Command<R> aCommand) throws CommandException {
throw re;

} finally {
-logRec.setEndTime( new java.util.Date() );

if ( logRec.getActionResult() == null ) {
logRec.setActionResult( ActionLogRecord.Result.OK );
+logSvc.log(logRec);
+try {
+aCommand.onSuccess(getContext());
+} catch (Exception ex){
+
+}
} else {
ejbCtxt.setRollbackOnly();
+logSvc.log(logRec);
}
+logRec.setEndTime( new java.util.Date() );
logSvc.log(logRec);
}
}


+// @TransactionAttribute(REQUIRES_NEW)
+// public void onSuccess(Command aCommand) {
+// aCommand.onSuccess(getContext());
+// }

public CommandContext getContext() {
if (ctxt == null) {
ctxt = new CommandContext() {
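Taken together, the new finally block logs the action record, then hands control to the command's post-commit hook, swallowing any failure so the hook cannot mark the transaction rollback-only. A minimal standalone sketch of that control flow (SketchCommand and SketchEngine are stand-ins for illustration, not Dataverse classes; the EJB and ActionLogRecord plumbing is elided):

    import java.util.Date;

    interface SketchCommand<R> {
        R execute() throws Exception;
        void onSuccess(); // the post-commit hook this commit introduces
    }

    class SketchEngine {
        <R> R submit(SketchCommand<R> cmd) throws Exception {
            boolean failed = false;
            try {
                return cmd.execute(); // the transactional work
            } catch (Exception e) {
                failed = true;        // the real engine records this on its ActionLogRecord
                throw e;
            } finally {
                if (!failed) {
                    try {
                        cmd.onSuccess();   // runs only on the success path...
                    } catch (Exception ex) {
                        // ...and its failures are swallowed, so a broken hook
                        // cannot mark the surrounding transaction rollback-only
                    }
                }
                System.out.println("end time: " + new Date()); // stands in for logRec/logSvc
            }
        }
    }

The commented-out REQUIRES_NEW variant above hints at a different design still under consideration: running the hook in its own transaction rather than inside the finally block of the current one.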
@@ -68,6 +68,12 @@ public Map<String, DvObject> getAffectedDvObjects() {
public DataverseRequest getRequest() {
return request;
}

+@Override
+public void onSuccess(CommandContext ctxt){
+//default behavior is to not do anything
+//useful in specific cases, e.g. commands that index or send emails
+}

@Override
public Map<String, Set<Permission>> getRequiredPermissions() {
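Because the base class supplies this no-op, a concrete command opts in by overriding it. A hypothetical example (MailSession, SketchBaseCommand, and InviteUserCommand are invented names for illustration, not part of this commit); deferring the email until after the commit avoids notifying users about an invitation the database then rolls back:

    // Stand-in types; not Dataverse API.
    interface MailSession { void send(String to, String subject); }

    abstract class SketchBaseCommand<R> {
        public abstract R execute(MailSession ctxt);

        public void onSuccess(MailSession ctxt) {
            // default behavior: do nothing, mirroring AbstractCommand above
        }
    }

    class InviteUserCommand extends SketchBaseCommand<String> {
        private final String email;

        InviteUserCommand(String email) { this.email = email; }

        @Override
        public String execute(MailSession ctxt) {
            // transactional work: persist the invitation
            return "invitation:" + email;
        }

        @Override
        public void onSuccess(MailSession ctxt) {
            // deferred side effect: mail goes out only after the commit
            ctxt.send(email, "You have been invited");
        }
    }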
@@ -40,7 +40,13 @@ public interface Command<R> {
/**
* @return A map of the permissions required for this command
*/
Map<String,Set<Permission>> getRequiredPermissions();

public String describe();

+/**
+ * Completes actions that should happen after the transaction
+ * has committed to the database
+ */
+public void onSuccess(CommandContext ctxt);
}
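A design note: because onSuccess is declared abstractly on the interface, every class implementing Command directly must now provide a body, which is why the base class adds the no-op above. On Java 8+, a default method would add the hook without touching any implementation. A self-contained sketch of that alternative shape, not what this commit does (the stub types stand in for the real ones in the engine package):

    import java.util.Map;
    import java.util.Set;

    // Stubs so the sketch stands alone; the real types live in the engine package.
    class CommandContext {}
    class CommandException extends Exception {}
    enum Permission {}

    interface CommandSketch<R> {
        R execute(CommandContext ctxt) throws CommandException;
        Map<String, Set<Permission>> getRequiredPermissions();
        String describe();

        // Java 8 alternative: a default body spares every implementation
        // from adding its own no-op.
        default void onSuccess(CommandContext ctxt) {
            // nothing to do by default
        }
    }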
@@ -35,11 +35,12 @@ public class CreateDatasetCommand extends AbstractCommand<Dataset> {

private static final Logger logger = Logger.getLogger(CreateDatasetCommand.class.getCanonicalName());

-private final Dataset theDataset;
+private Dataset theDataset;
private final boolean registrationRequired;
// TODO: rather than have a boolean, create a sub-command for creating a dataset during import
private final ImportUtil.ImportType importType;
private final Template template;
+private SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd-hh.mm.ss");

public CreateDatasetCommand(Dataset theDataset, DataverseRequest aRequest) {
super(aRequest, theDataset.getOwner());
@@ -75,7 +76,6 @@ public CreateDatasetCommand(Dataset theDataset, DataverseRequest aRequest, boole

@Override
public Dataset execute(CommandContext ctxt) throws CommandException {
-SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd-hh.mm.ss");

IdServiceBean idServiceBean = IdServiceBean.getBean(theDataset.getProtocol(), ctxt);

@@ -202,16 +202,16 @@ by the Dataset page (in CREATE mode), it already has the persistent
throw new IllegalCommandException("Dataset could not be created. Registration failed", this);
}
logger.log(Level.FINE, "after doi {0}", formatter.format(new Date().getTime()));
-Dataset savedDataset = ctxt.em().merge(theDataset);
+theDataset = ctxt.em().merge(theDataset);
logger.log(Level.FINE, "after db update {0}", formatter.format(new Date().getTime()));
// set the role to be the default contributor role for its dataverse
if (importType == null || importType.equals(ImportType.NEW)) {
String privateUrlToken = null;
-ctxt.roles().save(new RoleAssignment(savedDataset.getOwner().getDefaultContributorRole(), getRequest().getUser(), savedDataset, privateUrlToken));
+ctxt.roles().save(new RoleAssignment(theDataset.getOwner().getDefaultContributorRole(), getRequest().getUser(), theDataset, privateUrlToken));
}

-savedDataset.setPermissionModificationTime(new Timestamp(new Date().getTime()));
-savedDataset = ctxt.em().merge(savedDataset);
+theDataset.setPermissionModificationTime(new Timestamp(new Date().getTime()));
+theDataset = ctxt.em().merge(theDataset);

//If the Id type is sequential and Dependent then write file identifiers outside the command
String datasetIdentifier = theDataset.getIdentifier();
@@ -221,7 +221,7 @@ by the Dataset page (in CREATE mode), it already has the persistent
maxIdentifier = ctxt.datasets().getMaximumExistingDatafileIdentifier(theDataset);
}
String dataFileIdentifier = null;
-for (DataFile dataFile : savedDataset.getFiles()) {
+for (DataFile dataFile : theDataset.getFiles()) {
if (maxIdentifier != null) {
maxIdentifier++;
dataFileIdentifier = datasetIdentifier + "/" + maxIdentifier.toString();
@@ -236,60 +236,34 @@ by the Dataset page (in CREATE mode), it already has the persistent
logger.fine("Checking if rsync support is enabled.");
if (DataCaptureModuleUtil.rsyncSupportEnabled(ctxt.settings().getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
try {
-ScriptRequestResponse scriptRequestResponse = ctxt.engine().submit(new RequestRsyncScriptCommand(getRequest(), savedDataset));
+ScriptRequestResponse scriptRequestResponse = ctxt.engine().submit(new RequestRsyncScriptCommand(getRequest(), theDataset));
logger.fine("script: " + scriptRequestResponse.getScript());
} catch (RuntimeException ex) {
logger.info("Problem getting rsync script: " + ex.getLocalizedMessage());
}
}
logger.fine("Done with rsync request, if any.");

-try {
-/**
-* @todo Do something with the result. Did it succeed or fail?
-*/
-boolean doNormalSolrDocCleanUp = true;
-ctxt.index().indexDataset(savedDataset, doNormalSolrDocCleanUp);
-
-} catch (Exception e) { // RuntimeException e ) {
-logger.log(Level.WARNING, "Exception while indexing:" + e.getMessage()); //, e);
-/**
-* Even though the original intention appears to have been to allow
-* the dataset to be successfully created, even if an exception is
-* thrown during the indexing - in reality, a runtime exception
-* there, even caught, still forces the EJB transaction to be rolled
-* back; hence the dataset is NOT created... but the command
-* completes and exits as if it has been successful. So I am going
-* to throw a Command Exception here, to avoid this. If we DO want
-* to be able to create datasets even if they cannot be immediately
-* indexed, we'll have to figure out how to do that. (Note that
-* import is still possible when Solr is down - because
-* indexDataset() does NOT throw an exception if it is. -- L.A. 4.5
-*/
-throw new CommandException("Dataset could not be created. Indexing failed", this);
-
-}
-logger.log(Level.FINE, "after index {0}", formatter.format(new Date().getTime()));


// if we are not migrating, assign the user to this version
if (importType == null || importType.equals(ImportType.NEW)) {
DatasetVersionUser datasetVersionDataverseUser = new DatasetVersionUser();
String id = getRequest().getUser().getIdentifier();
id = id.startsWith("@") ? id.substring(1) : id;
AuthenticatedUser au = ctxt.authentication().getAuthenticatedUser(id);
datasetVersionDataverseUser.setAuthenticatedUser(au);
-datasetVersionDataverseUser.setDatasetVersion(savedDataset.getLatestVersion());
+datasetVersionDataverseUser.setDatasetVersion(theDataset.getLatestVersion());
datasetVersionDataverseUser.setLastUpdateDate(createDate);
-if (savedDataset.getLatestVersion().getId() == null) {
-logger.warning("CreateDatasetCommand: savedDataset version id is null");
+if (theDataset.getLatestVersion().getId() == null) {
+logger.warning("CreateDatasetCommand: theDataset version id is null");
} else {
-datasetVersionDataverseUser.setDatasetVersion(savedDataset.getLatestVersion());
+datasetVersionDataverseUser.setDatasetVersion(theDataset.getLatestVersion());
}
ctxt.em().merge(datasetVersionDataverseUser);
}
logger.log(Level.FINE, "after create version user " + formatter.format(new Date().getTime()));
// throw new CommandException("trying to break the command structure from create dataset command", this);
-return savedDataset;
+return theDataset;
}

@Override
@@ -315,4 +289,41 @@ public boolean equals(Object obj) {
public String toString() {
return "[DatasetCreate dataset:" + theDataset.getId() + "]";
}

+@Override
+public void onSuccess(CommandContext ctxt){
+if (true) {
+throw new RuntimeException("Breaking CreateDatasetCommand in onSuccess");
+}
+// try {
+/**
+* @todo Do something with the result. Did it succeed or fail?
+*/
+boolean doNormalSolrDocCleanUp = true;
+ctxt.index().indexDataset(theDataset, doNormalSolrDocCleanUp);
+
+/**
+} catch (Exception e) { // RuntimeException e ) {
+logger.log(Level.WARNING, "Exception while indexing:" + e.getMessage()); //, e);
+* Even though the original intention appears to have been to allow
+* the dataset to be successfully created, even if an exception is
+* thrown during the indexing - in reality, a runtime exception
+* there, even caught, still forces the EJB transaction to be rolled
+* back; hence the dataset is NOT created... but the command
+* completes and exits as if it has been successful. So I am going
+* to throw a Command Exception here, to avoid this. If we DO want
+* to be able to create datasets even if they cannot be immediately
+* indexed, we'll have to figure out how to do that. (Note that
+* import is still possible when Solr is down - because
+* indexDataset() does NOT throw an exception if it is. -- L.A. 4.5
+throw new CommandException("Dataset could not be created. Indexing failed", this);
+}
+*/
+logger.log(Level.FINE, "after index {0}", formatter.format(new Date().getTime()));
+
+}

}
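The unconditional throw in the new onSuccess is evidently deliberate scaffolding: because EjbDataverseEngine wraps the hook in a catch-all, a dataset create should now commit even when post-commit indexing fails, which is exactly the rollback problem the deleted comment from L.A. describes. A self-contained sketch of that contract (Cmd, submit, and the demo values are stand-ins, not Dataverse's engine or tests):

    public class OnSuccessContractDemo {

        interface Cmd<R> {
            R execute();
            default void onSuccess() {}
        }

        static <R> R submit(Cmd<R> cmd) {
            R result = cmd.execute(); // the "transactional" work
            try {
                cmd.onSuccess();      // post-commit hook
            } catch (RuntimeException ex) {
                // the engine's empty catch block, made visible
                System.out.println("onSuccess failed, result kept: " + ex.getMessage());
            }
            return result;
        }

        public static void main(String[] args) {
            String saved = submit(new Cmd<String>() {
                @Override public String execute() { return "dataset-42"; }
                @Override public void onSuccess() {
                    throw new RuntimeException("Breaking onSuccess");
                }
            });
            System.out.println("created: " + saved); // still prints, despite the throw
        }
    }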
