diff --git a/.gitignore b/.gitignore
index 390a4a56ce0..ab38fe5c148 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,4 +22,6 @@ scripts/api/py_api_wrapper/local-data/*
 doc/sphinx-guides/build
 faces-config.NavData
 src/main/java/BuildNumber.properties
+scripts/installer/dvinstall.zip
+scripts/installer/dvinstall/
 /nbproject/
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 14dc908e0ab..53f241f4360 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -86,9 +86,9 @@ Non-superusers who are not "Admin" on the root dataverse will not be able to to
 Persistent Identifiers and Publishing Datasets
 ++++++++++++++++++++++++++++++++++++++++++++++
 
-Persistent identifiers are a required and integral part of the Dataverse platform. They provide a URL that is guaranteed to resolve to the datasets they represent. Dataverse currently supports creating identifiers using DOI and additionally displaying identifiers created using HDL. By default and for testing convenience, the installer configures a temporary DOI test namespace through EZID. This is sufficient to create and publish datasets but they are not citable nor guaranteed to be preserved. To properly configure persistent identifiers for a production installation, an account and associated namespace must be acquired for a fee from one of two DOI providers: EZID (http://ezid.cdlib.org) or DataCite (https://www.datacite.org). Once account credentials and DOI namespace have been acquired, please complete the following identifier configuration parameters:
+Persistent identifiers are a required and integral part of the Dataverse platform. They provide a URL that is guaranteed to resolve to the datasets they represent. Dataverse currently supports creating identifiers using DOI and HDL. By default and for testing convenience, the installer configures a temporary DOI test namespace through EZID. This is sufficient to create and publish datasets, but they are not citable nor guaranteed to be preserved. To properly configure persistent identifiers for a production installation, an account and associated namespace must be acquired for a fee from a DOI or HDL provider: EZID (http://ezid.cdlib.org), DataCite (https://www.datacite.org), or Handle.Net (https://www.handle.net). Once account credentials and namespace have been acquired, please complete the following identifier configuration parameters:
 
-JVM Options: :ref:`doi.baseurlstring`, :ref:`doi.username`, :ref:`doi.password`
+JVM Options: :ref:`doi.baseurlstring`, :ref:`doi.username`, :ref:`doi.password`, :ref:`dataverse.handlenet.admcredfile`, :ref:`dataverse.handlenet.admprivphrase`
 
 Database Settings: :ref:`:DoiProvider <:DoiProvider>`, :ref:`:Protocol <:Protocol>`, :ref:`:Authority <:Authority>`, :ref:`:DoiSeparator <:DoiSeparator>`
@@ -218,11 +218,11 @@ Used in conjuction with ``doi.baseurlstring``.
 dataverse.handlenet.admcredfile
 +++++++++++++++++++++++++++++++
 
-For Handle support (not fully developed).
+For Handle support, typically the full path to ``handle/svr_1/admpriv.bin``.
 
 dataverse.handlenet.admprivphrase
 +++++++++++++++++++++++++++++++++
 
-For Handle support (not fully developed).
+For Handle support.
 
 Database Settings
 -----------------
@@ -416,7 +416,7 @@ The relative path URL to which users will be sent after signup. The default sett
 The location of your TwoRavens installation.
 Activation of TwoRavens also requires the setting below, ``TwoRavensTabularView``
 
 :TwoRavensTabularView
-+++++++++++++++++++++
++++++++++++++++++++
 
 Set ``TwoRavensTabularView`` to true to allow a user to view tabular files via the TwoRavens application. This boolean affects whether a user will see the "Explore" button.
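The JVM options documented above are plain Java system properties; the service beans touched by this patch read them directly at construction time (see DOIEZIdServiceBean and HandlenetServiceBean below). A minimal sketch of that lookup — the fallback endpoint is the one hard-coded in the EZID bean, everything else is illustrative:

    // Sketch: how the PID service beans consume the JVM options documented above.
    public class PidOptionsSketch {
        public static void main(String[] args) {
            String baseUrl    = System.getProperty("doi.baseurlstring", "https://ezid.cdlib.org");
            String username   = System.getProperty("doi.username");
            String password   = System.getProperty("doi.password");
            // Handle support reads its admin credentials the same way:
            String credFile   = System.getProperty("dataverse.handlenet.admcredfile");
            String privPhrase = System.getProperty("dataverse.handlenet.admprivphrase");
            System.out.println("registering against " + baseUrl + " as " + username);
        }
    }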
diff --git a/src/main/java/edu/harvard/iq/dataverse/AbstractPersistentIdRegistrationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AbstractPersistentIdRegistrationServiceBean.java
new file mode 100644
index 00000000000..5a21ae276e0
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/AbstractPersistentIdRegistrationServiceBean.java
@@ -0,0 +1,112 @@
+package edu.harvard.iq.dataverse;
+
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+
+import javax.ejb.EJB;
+import java.util.*;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+public abstract class AbstractPersistentIdRegistrationServiceBean implements PersistentIdRegistrationServiceBean {
+
+    private static final Logger logger = Logger.getLogger(AbstractPersistentIdRegistrationServiceBean.class.getCanonicalName());
+
+    @EJB
+    DataverseServiceBean dataverseService;
+    @EJB
+    SettingsServiceBean settingsService;
+    @EJB
+    SystemConfig systemConfig;
+
+    @Override
+    public String getIdentifierForLookup(String protocol, String authority, String separator, String identifier) {
+        logger.log(Level.FINE, "getIdentifierForLookup");
+        return protocol + ":" + authority + separator + identifier;
+    }
+
+    @Override
+    public HashMap<String, String> getMetadataFromStudyForCreateIndicator(Dataset datasetIn) {
+        logger.log(Level.FINE, "getMetadataFromStudyForCreateIndicator");
+        HashMap<String, String> metadata = new HashMap<>();
+
+        String authorString = datasetIn.getLatestVersion().getAuthorsStr();
+
+        if (authorString.isEmpty()) {
+            authorString = ":unav";
+        }
+
+        String producerString = dataverseService.findRootDataverse().getName() + " Dataverse";
+
+        if (producerString.isEmpty()) {
+            producerString = ":unav";
+        }
+        metadata.put("datacite.creator", authorString);
+        metadata.put("datacite.title", datasetIn.getLatestVersion().getTitle());
+        metadata.put("datacite.publisher", producerString);
+        metadata.put("datacite.publicationyear", generateYear());
+        metadata.put("_target", getTargetUrl(datasetIn));
+        return metadata;
+    }
+
+    protected HashMap<String, String> getUpdateMetadataFromDataset(Dataset datasetIn) {
+        logger.log(Level.FINE, "getUpdateMetadataFromDataset");
+        HashMap<String, String> metadata = new HashMap<>();
+
+        String authorString = datasetIn.getLatestVersion().getAuthorsStr();
+
+        if (authorString.isEmpty()) {
+            authorString = ":unav";
+        }
+
+        String producerString = dataverseService.findRootDataverse().getName() + " Dataverse";
+
+        if (producerString.isEmpty()) {
+            producerString = ":unav";
+        }
+        metadata.put("datacite.creator", authorString);
+        metadata.put("datacite.title", datasetIn.getLatestVersion().getTitle());
+        metadata.put("datacite.publisher", producerString);
+
+        return metadata;
+    }
+
+    @Override
+    public HashMap<String, String> getMetadataFromDatasetForTargetURL(Dataset datasetIn) {
+        logger.log(Level.FINE, "getMetadataFromDatasetForTargetURL");
+        HashMap<String, String> metadata = new HashMap<>();
+        metadata.put("_target", getTargetUrl(datasetIn));
+        return metadata;
+    }
+
+    protected String getTargetUrl(Dataset datasetIn) {
+        logger.log(Level.FINE, "getTargetUrl");
+        return systemConfig.getDataverseSiteUrl() + Dataset.TARGET_URL + datasetIn.getGlobalId();
+    }
+
+    @Override
+    public String getIdentifierFromDataset(Dataset dataset) {
+        logger.log(Level.FINE, "getIdentifierFromDataset");
+        return dataset.getGlobalId();
+    }
+
+    private String generateYear()
+    {
+        StringBuilder guid = new StringBuilder();
+
+        // Create a calendar to get the date formatted properly
+        String[] ids = TimeZone.getAvailableIDs(-8 * 60 * 60 * 1000);
+        SimpleTimeZone pdt = new SimpleTimeZone(-8 * 60 * 60 * 1000, ids[0]);
+        pdt.setStartRule(Calendar.APRIL, 1, Calendar.SUNDAY, 2 * 60 * 60 * 1000);
+        pdt.setEndRule(Calendar.OCTOBER, -1, Calendar.SUNDAY, 2 * 60 * 60 * 1000);
+        Calendar calendar = new GregorianCalendar(pdt);
+        Date trialTime = new Date();
+        calendar.setTime(trialTime);
+        guid.append(calendar.get(Calendar.YEAR));
+
+        return guid.toString();
+    }
+
+    @Override
+    public void postDeleteCleanup(final Dataset doomed){}
+}
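The base class above builds the shared DataCite-style metadata map that every provider submits; subclasses (like the DataCite bean below) layer provider-specific keys on top via super calls. A standalone sketch of what that map looks like for a create call — the inline values are illustrative stand-ins for the dataset accessors:

    import java.util.Calendar;
    import java.util.GregorianCalendar;
    import java.util.HashMap;
    import java.util.SimpleTimeZone;
    import java.util.TimeZone;

    // Sketch of the create-time metadata the base class assembles:
    // ":unav" fallbacks plus a publication year, with status "reserved".
    public class CreateMetadataSketch {
        public static void main(String[] args) {
            String author = "";                     // imagine getLatestVersion().getAuthorsStr()
            if (author.isEmpty()) author = ":unav"; // DataCite convention for "unavailable"
            HashMap<String, String> metadata = new HashMap<>();
            metadata.put("datacite.creator", author);
            metadata.put("datacite.publicationyear", year());
            metadata.put("_status", "reserved");    // created but not yet public
            System.out.println(metadata);
        }
        private static String year() {
            String[] ids = TimeZone.getAvailableIDs(-8 * 60 * 60 * 1000);
            Calendar calendar = new GregorianCalendar(new SimpleTimeZone(-8 * 60 * 60 * 1000, ids[0]));
            return String.valueOf(calendar.get(Calendar.YEAR));
        }
    }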
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java
index fa070b9cbec..ff973e58eb5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java
@@ -5,16 +5,7 @@
  */
 package edu.harvard.iq.dataverse;
 
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.util.SystemConfig;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import java.util.HashMap;
-import java.util.SimpleTimeZone;
-import java.util.TimeZone;
+import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import javax.ejb.EJB;
@@ -25,149 +16,166 @@
  * @author luopc
  */
 @Stateless
-public class DOIDataCiteServiceBean {
-
-    private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.DOIDataCiteServiceBean");
+public class DOIDataCiteServiceBean extends AbstractPersistentIdRegistrationServiceBean {
 
+    private static final Logger logger = Logger.getLogger(DOIDataCiteServiceBean.class.getCanonicalName());
 
-    @EJB
-    DataverseServiceBean dataverseService;
-    @EJB
-    SettingsServiceBean settingsService;
     @EJB
     DOIDataCiteRegisterService doiDataCiteRegisterService;
-    @EJB
-    SystemConfig systemConfig;
-
-    private String DOISHOULDER = "";
-
+
     public DOIDataCiteServiceBean() {
-
     }
-
-    public boolean alreadyExists (Dataset dataset){
+
+    @Override
+    public boolean registerWhenPublished() {
+        return true;
+    }
+
+    @Override
+    public boolean alreadyExists(Dataset dataset) {
+        logger.log(Level.FINE, "alreadyExists");
         boolean alreadyExists;
         String identifier = getIdentifierFromDataset(dataset);
         try {
             alreadyExists = doiDataCiteRegisterService.testDOIExists(identifier);
         } catch (Exception e) {
-            logger.log(Level.INFO, "alreadyExists failed");
+            logger.log(Level.WARNING, "alreadyExists failed");
             return false;
         }
         return alreadyExists;
     }
 
+    @Override
     public String createIdentifier(Dataset dataset) throws Exception {
-        String retString = "";
+        logger.log(Level.FINE, "createIdentifier");
         String identifier = getIdentifierFromDataset(dataset);
-        HashMap metadata = getMetadataFromStudyForCreateIndicator(dataset);
-        metadata.put("_status", "reserved");
+        HashMap<String, String> metadata = getMetadataFromStudyForCreateIndicator(dataset);
+        metadata.put("_status", "reserved");
         try {
-            retString = doiDataCiteRegisterService.createIdentifier(identifier, metadata, dataset);
+            String retString = doiDataCiteRegisterService.createIdentifier(identifier, metadata, dataset);
+            logger.log(Level.FINE, "create DOI identifier retString : " + retString);
+            return retString;
         } catch (Exception e) {
-            logger.log(Level.INFO, "Identifier not created: create failed");
-            logger.log(Level.INFO, "String " + e.toString());
-            logger.log(Level.INFO, "localized message " + e.getLocalizedMessage());
-            logger.log(Level.INFO, "cause " + e.getCause());
-            logger.log(Level.INFO, "message " + e.getMessage());
+            logger.log(Level.WARNING, "Identifier not created: create failed");
+            logger.log(Level.WARNING, "String {0}", e.toString());
+            logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
+            logger.log(Level.WARNING, "cause", e.getCause());
+            logger.log(Level.WARNING, "message {0}", e.getMessage());
             throw e;
-//            return "Identifier not created " + e.getLocalizedMessage();
         }
-        return retString;
     }
-
-    public HashMap getIdentifierMetadata(Dataset dataset) {
+
+    @Override
+    public HashMap<String, String> getIdentifierMetadata(Dataset dataset) {
+        logger.log(Level.FINE, "getIdentifierMetadata");
         String identifier = getIdentifierFromDataset(dataset);
-        HashMap metadata = new HashMap();
+        HashMap<String, String> metadata = new HashMap<>();
        try {
            metadata = doiDataCiteRegisterService.getMetadata(identifier);
        } catch (Exception e) {
-            logger.log(Level.INFO, "getIdentifierMetadata failed");
-            logger.log(Level.INFO, "String " + e.toString());
-            logger.log(Level.INFO, "localized message " + e.getLocalizedMessage());
-            logger.log(Level.INFO, "cause " + e.getCause());
-            logger.log(Level.INFO, "message " + e.getMessage());
+            logger.log(Level.WARNING, "getIdentifierMetadata failed");
+            logger.log(Level.WARNING, "String {0}", e.toString());
+            logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
+            logger.log(Level.WARNING, "cause", e.getCause());
+            logger.log(Level.WARNING, "message {0}", e.getMessage());
            return metadata;
        }
        return metadata;
     }
 
-    public HashMap lookupMetadataFromIdentifier(String protocol, String authority, String separator, String identifier) {
+    /**
+     * Looks up the metadata for a Global Identifier
+     * @param protocol the identifier system, e.g. "doi" or "hdl"
+     * @param authority the namespace that the authority manages in the identifier system
+     * @param separator the string that separates authority from local identifier part
+     * @param identifier the local identifier part
+     * @return a Map of metadata. It is empty when the lookup failed, e.g. when
+     * the identifier does not exist.
+     */
+    @Override
+    public HashMap<String, String> lookupMetadataFromIdentifier(String protocol, String authority, String separator, String identifier) {
+        logger.log(Level.FINE, "lookupMetadataFromIdentifier");
         String identifierOut = getIdentifierForLookup(protocol, authority, separator, identifier);
-        HashMap metadata = new HashMap();
+        HashMap<String, String> metadata = new HashMap<>();
        try {
            metadata = doiDataCiteRegisterService.getMetadata(identifierOut);
        } catch (Exception e) {
-            logger.log(Level.INFO, "None existing so we can use this identifier");
-            logger.log(Level.INFO, "identifier: {0}", identifierOut);
+            logger.log(Level.WARNING, "None existing so we can use this identifier");
+            logger.log(Level.WARNING, "identifier: {0}", identifierOut);
            return metadata;
        }
        return metadata;
     }
 
-    public String getIdentifierForLookup(String protocol, String authority, String separator, String identifier) {
-        return protocol + ":" + authority + separator + identifier;
-    }
-
-    public String modifyIdentifier(Dataset dataset, HashMap metadata) throws Exception {
+    /**
+     * Modifies the DataCite metadata for a Dataset
+     * @param dataset the Dataset whose metadata needs to be modified
+     * @param metadata the new metadata for the Dataset
+     * @return the Dataset identifier, or null if the modification failed
+     */
+    @Override
+    public String modifyIdentifier(Dataset dataset, HashMap<String, String> metadata) throws Exception {
+        logger.log(Level.FINE, "modifyIdentifier");
         String identifier = getIdentifierFromDataset(dataset);
         try {
             doiDataCiteRegisterService.createIdentifier(identifier, metadata, dataset);
         } catch (Exception e) {
-            logger.log(Level.INFO, "modifyMetadata failed");
-            logger.log(Level.INFO, "String " + e.toString());
-            logger.log(Level.INFO, "localized message " + e.getLocalizedMessage());
-            logger.log(Level.INFO, "cause " + e.getCause());
-            logger.log(Level.INFO, "message " + e.getMessage());
+            logger.log(Level.WARNING, "modifyMetadata failed");
+            logger.log(Level.WARNING, "String " + e.toString());
+            logger.log(Level.WARNING, "localized message " + e.getLocalizedMessage());
+            logger.log(Level.WARNING, "cause " + e.getCause());
+            logger.log(Level.WARNING, "message " + e.getMessage());
             throw e;
         }
         return identifier;
     }
-
-    public void deleteRecordFromCache(Dataset datasetIn){
+
+    @Override
+    public void postDeleteCleanup(final Dataset datasetIn){
+        logger.log(Level.FINE, "postDeleteCleanup");
         String identifier = getIdentifierFromDataset(datasetIn);
         HashMap doiMetadata = new HashMap();
         try {
             doiMetadata = doiDataCiteRegisterService.getMetadata(identifier);
         } catch (Exception e) {
-            logger.log(Level.INFO, "get matadata failed cannot delete");
-            logger.log(Level.INFO, "String " + e.toString());
-            logger.log(Level.INFO, "localized message " + e.getLocalizedMessage());
-            logger.log(Level.INFO, "cause " + e.getCause());
-            logger.log(Level.INFO, "message " + e.getMessage());
+            logger.log(Level.WARNING, "get metadata failed cannot delete");
+            logger.log(Level.WARNING, "String {0}", e.toString());
+            logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
+            logger.log(Level.WARNING, "cause", e.getCause());
+            logger.log(Level.WARNING, "message {0}", e.getMessage());
         }
 
         String idStatus = (String) doiMetadata.get("_status");
 
         if (idStatus == null || idStatus.equals("reserved")) {
-            logger.log(Level.INFO, "Delete status is reserved..");
+            logger.log(Level.WARNING, "Delete status is reserved..");
             try {
                 doiDataCiteRegisterService.deleteIdentifier(identifier);
             } catch (Exception e) {
-                logger.log(Level.INFO, "delete failed");
-                logger.log(Level.INFO, "String " + e.toString());
-                logger.log(Level.INFO, "localized message " + e.getLocalizedMessage());
-                logger.log(Level.INFO, "cause " + e.getCause());
-                logger.log(Level.INFO, "message " + e.getMessage());
+                logger.log(Level.WARNING, "delete failed");
+                logger.log(Level.WARNING, "String {0}", e.toString());
+                logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
+                logger.log(Level.WARNING, "cause", e.getCause());
+                logger.log(Level.WARNING, "message {0}", e.getMessage());
                 throw new RuntimeException(e);
             }
-            return;
         }
-    }
-
+    }
+
+    @Override
     public void deleteIdentifier(Dataset datasetIn) throws Exception {
+        logger.log(Level.FINE, "deleteIdentifier");
         String identifier = getIdentifierFromDataset(datasetIn);
-        HashMap doiMetadata = new HashMap();
+        HashMap<String, String> doiMetadata = new HashMap();
         try {
             doiMetadata = doiDataCiteRegisterService.getMetadata(identifier);
         } catch (Exception e) {
-            logger.log(Level.INFO, "get matadata failed cannot delete");
-            logger.log(Level.INFO, "String " + e.toString());
-            logger.log(Level.INFO, "localized message " + e.getLocalizedMessage());
-            logger.log(Level.INFO, "cause " + e.getCause());
-            logger.log(Level.INFO, "message " + e.getMessage());
+            logger.log(Level.WARNING, "get metadata failed cannot delete");
+            logger.log(Level.WARNING, "String {0}", e.toString());
+            logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
+            logger.log(Level.WARNING, "cause ", e.getCause());
+            logger.log(Level.WARNING, "message {0}", e.getMessage());
         }
 
         String idStatus = (String) doiMetadata.get("_status");
@@ -177,11 +185,11 @@ public void deleteIdentifier(Dataset datasetIn) throws Exception {
             try {
                 doiDataCiteRegisterService.deleteIdentifier(identifier);
             } catch (Exception e) {
-                logger.log(Level.INFO, "delete failed");
-                logger.log(Level.INFO, "String " + e.toString());
-                logger.log(Level.INFO, "localized message " + e.getLocalizedMessage());
-                logger.log(Level.INFO, "cause " + e.getCause());
-                logger.log(Level.INFO, "message " + e.getMessage());
+                logger.log(Level.WARNING, "delete failed");
+                logger.log(Level.WARNING, "String {0}", e.toString());
+                logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
+                logger.log(Level.WARNING, "cause", e.getCause());
+                logger.log(Level.WARNING, "message {0}", e.getMessage());
             }
             return;
         }
@@ -191,102 +199,49 @@ public void deleteIdentifier(Dataset datasetIn) throws Exception {
         }
     }
 
-    private HashMap getUpdateMetadataFromDataset(Dataset datasetIn) {
-        HashMap metadata = new HashMap();
-
-        String authorString = datasetIn.getLatestVersion().getAuthorsStr();
-
-        if (authorString.isEmpty()) {
-            authorString = ":unav";
-        }
-
-        String producerString = dataverseService.findRootDataverse().getName() + " Dataverse";
-
-        if (producerString.isEmpty()) {
-            producerString = ":unav";
-        }
-        metadata.put("datacite.creator", authorString);
-        metadata.put("datacite.title", datasetIn.getLatestVersion().getTitle());
-        metadata.put("datacite.publisher", producerString);
+    protected HashMap<String, String> getUpdateMetadataFromDataset(Dataset datasetIn) {
+        logger.log(Level.FINE, "getUpdateMetadataFromDataset");
+        HashMap<String, String> metadata = super.getUpdateMetadataFromDataset(datasetIn);
         metadata.put("datacite.publicationyear", generateYear());
         return metadata;
     }
 
-    private HashMap getMetadataFromStudyForCreateIndicator(Dataset datasetIn) {
-        HashMap metadata = new HashMap();
-
-        String authorString = datasetIn.getLatestVersion().getAuthorsStr();
-
-        if (authorString.isEmpty()) {
-            authorString = ":unav";
-        }
-
-        String producerString = dataverseService.findRootDataverse().getName() + " Dataverse";
-
-        if (producerString.isEmpty()) {
-            producerString = ":unav";
-        }
-        metadata.put("datacite.creator", authorString);
-        metadata.put("datacite.title", datasetIn.getLatestVersion().getTitle());
-        metadata.put("datacite.publisher", producerString);
-        metadata.put("datacite.publicationyear", generateYear());
-        metadata.put("_target", getTargetUrl(datasetIn));
-        return metadata;
-    }
-
-    public HashMap getMetadataFromDatasetForTargetURL(Dataset datasetIn) {
-        HashMap metadata = new HashMap<>();
-        metadata.put("_target", getTargetUrl(datasetIn));
+    @Override
+    public HashMap<String, String> getMetadataFromStudyForCreateIndicator(Dataset datasetIn) {
+        logger.log(Level.FINE, "getMetadataFromStudyForCreateIndicator");
+        HashMap<String, String> metadata = super.getMetadataFromStudyForCreateIndicator(datasetIn);
+        metadata.put("datacite.resourcetype", "Dataset");
         return metadata;
     }
-
-    private String getTargetUrl(Dataset datasetIn){
-        return systemConfig.getDataverseSiteUrl() + Dataset.TARGET_URL + datasetIn.getGlobalId();
-    }
-
-    private String getIdentifierFromDataset(Dataset dataset) {
-        return dataset.getGlobalId();
-    }
 
-    public void publicizeIdentifier(Dataset dataset) throws Exception {
-        updateIdentifierStatus(dataset, "public");
+    @Override
+    public boolean publicizeIdentifier(Dataset dataset) {
+        logger.log(Level.FINE, "publicizeIdentifier");
+        return updateIdentifierStatus(dataset, "public");
     }
 
-    private void updateIdentifierStatus(Dataset dataset, String statusIn) throws Exception {
+    private boolean updateIdentifierStatus(Dataset dataset, String statusIn) {
+        logger.log(Level.FINE, "updateIdentifierStatus");
         String identifier = getIdentifierFromDataset(dataset);
-        HashMap metadata = getUpdateMetadataFromDataset(dataset);
-        metadata.put("_target", getTargetUrl(dataset));
+        HashMap<String, String> metadata = getUpdateMetadataFromDataset(dataset);
         metadata.put("_status", statusIn);
+        metadata.put("_target", getTargetUrl(dataset));
         try {
             doiDataCiteRegisterService.createIdentifier(identifier, metadata, dataset);
+            return true;
         } catch (Exception e) {
-            logger.log(Level.INFO, "modifyMetadata failed");
-            logger.log(Level.INFO, "String " + e.toString());
-            logger.log(Level.INFO, "localized message " + e.getLocalizedMessage());
-            logger.log(Level.INFO, "cause " + e.getCause());
-            logger.log(Level.INFO, "message " + e.getMessage());
-            throw e;
+            logger.log(Level.WARNING, "modifyMetadata failed");
+            logger.log(Level.WARNING, "String {0}", e.toString());
+            logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
+            logger.log(Level.WARNING, "cause", e.getCause());
+            logger.log(Level.WARNING, "message {0}", e.getMessage());
            return false;
         }
     }
 
-    public static String generateYear() {
-        StringBuilder guid = new StringBuilder();
-
-        // Create a calendar to get the date formatted properly
-        String[] ids = TimeZone.getAvailableIDs(-8 * 60 * 60 * 1000);
-        SimpleTimeZone pdt = new SimpleTimeZone(-8 * 60 * 60 * 1000, ids[0]);
-        pdt.setStartRule(Calendar.APRIL, 1, Calendar.SUNDAY, 2 * 60 * 60 * 1000);
-        pdt.setEndRule(Calendar.OCTOBER, -1, Calendar.SUNDAY, 2 * 60 * 60 * 1000);
-        Calendar calendar = new GregorianCalendar(pdt);
-        Date trialTime = new Date();
-        calendar.setTime(trialTime);
-        guid.append(calendar.get(Calendar.YEAR));
-
-        return guid.toString();
-    }
-
-    public static String generateTimeString() {
+    private String generateYear()
+    {
         StringBuilder guid = new StringBuilder();
 
         // Create a calendar to get the date formatted properly
@@ -298,13 +253,6 @@ public static String generateTimeString() {
         Date trialTime = new Date();
         calendar.setTime(trialTime);
         guid.append(calendar.get(Calendar.YEAR));
-        guid.append(calendar.get(Calendar.DAY_OF_YEAR));
-        guid.append(calendar.get(Calendar.HOUR_OF_DAY));
-        guid.append(calendar.get(Calendar.MINUTE));
-        guid.append(calendar.get(Calendar.SECOND));
-        guid.append(calendar.get(Calendar.MILLISECOND));
-        double random = Math.random();
-        guid.append(random);
 
         return guid.toString();
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java
index 4c16cfcfd68..3bf53ddc55e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java
@@ -3,21 +3,14 @@
  * To change this template file, choose Tools | Templates
  * and open the template in the editor.
  */
-
 package edu.harvard.iq.dataverse;
-
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.ucsb.nceas.ezid.EZIDException;
 import edu.ucsb.nceas.ezid.EZIDService;
-import edu.ucsb.nceas.ezid.EZIDServiceRequest;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.*;
+
+import java.util.HashMap;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
 import javax.ejb.Stateless;
 
 /**
@@ -25,24 +18,19 @@
  * @author skraffmiller
 */
 @Stateless
-public class DOIEZIdServiceBean {
-    @EJB
-    DataverseServiceBean dataverseService;
-    @EJB
-    SettingsServiceBean settingsService;
-    @EJB
-    SystemConfig systemConfig;
+public class DOIEZIdServiceBean extends AbstractPersistentIdRegistrationServiceBean {
+
+    private static final Logger logger = Logger.getLogger(DOIEZIdServiceBean.class.getCanonicalName());
+
     EZIDService ezidService;
-    EZIDServiceRequest ezidServiceRequest;
+
     String baseURLString = "https://ezid.cdlib.org";
-    private static final Logger logger = Logger.getLogger("edu.harvard.iq.dvn.core.index.DOIEZIdServiceBean");
-    // get username and password from system properties
-    private String DOISHOULDER = "";
+
     private String USERNAME = "";
     private String PASSWORD = "";
-
+
     public DOIEZIdServiceBean() {
+        logger.log(Level.FINE, "Constructor");
         baseURLString = System.getProperty("doi.baseurlstring");
         ezidService = new EZIDService(baseURLString);
         USERNAME = System.getProperty("doi.username");
@@ -59,29 +47,54 @@ public DOIEZIdServiceBean() {
         } catch(Exception e){
             System.out.print("Other Error on ezidService.login(USERNAME, PASSWORD) - not EZIDException ");
         }
-    }
-
-    public String createIdentifier(Dataset dataset) {
-        String retString = "";
+    }
+
+    @Override
+    public boolean registerWhenPublished() {
+        return false;
+    }
+
+    @Override
+    public boolean alreadyExists(Dataset dataset) throws Exception {
+        logger.log(Level.FINE, "alreadyExists");
+        try {
+            HashMap<String, String> result = ezidService.getMetadata(getIdentifierFromDataset(dataset));
+            return result != null && !result.isEmpty();
+            // TODO just check for HTTP status code 200/404, sadly the status code is swept under the carpet
+        } catch (EZIDException e) {
+            logger.log(Level.WARNING, "alreadyExists failed");
+            logger.log(Level.WARNING, "String {0}", e.toString());
+            logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
+            logger.log(Level.WARNING, "cause", e.getCause());
+            logger.log(Level.WARNING, "message {0}", e.getMessage());
+            throw e;
+        }
+    }
+
+    @Override
+    public String createIdentifier(Dataset dataset) throws Exception {
+        logger.log(Level.FINE, "createIdentifier");
         String identifier = getIdentifierFromDataset(dataset);
         HashMap<String, String> metadata = getMetadataFromStudyForCreateIndicator(dataset);
         metadata.put("_status", "reserved");
         try {
-            retString = ezidService.createIdentifier(identifier, metadata);
+            String retString = ezidService.createIdentifier(identifier, metadata);
             logger.log(Level.FINE, "create DOI identifier retString : " + retString);
+            return retString;
         } catch (EZIDException e) {
             logger.log(Level.WARNING, "Identifier not created: create failed");
             logger.log(Level.WARNING, "String {0}", e.toString());
             logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
             logger.log(Level.WARNING, "cause", e.getCause());
             logger.log(Level.WARNING, "message {0}", e.getMessage());
-            return "Identifier not created " + e.getLocalizedMessage();
+            throw e;
         }
-        return retString;
     }
-
-
+
+
+    @Override
     public HashMap<String, String> getIdentifierMetadata(Dataset dataset) {
+        logger.log(Level.FINE, "getIdentifierMetadata");
         String identifier = getIdentifierFromDataset(dataset);
         HashMap<String, String> metadata = new HashMap<>();
         try {
@@ -93,25 +106,27 @@ public HashMap getIdentifierMetadata(Dataset dataset) {
             logger.log(Level.WARNING, "cause", e.getCause());
             logger.log(Level.WARNING, "message {0}", e.getMessage());
             return metadata;
-        }
+        }
         return metadata;
     }
 
     /**
      * Looks up the metadata for a Global Identifier
-     * @param protocol the identifier system, e.g. "doi"
+     * @param protocol the identifier system, e.g. "doi" or "hdl"
      * @param authority the namespace that the authority manages in the identifier system
      * @param separator the string that separates authority from local identifier part
     * @param identifier the local identifier part
     * @return a Map of metadata. It is empty when the lookup failed, e.g. when
     * the identifier does not exist.
     */
+    @Override
     public HashMap<String, String> lookupMetadataFromIdentifier(String protocol, String authority, String separator, String identifier) {
+        logger.log(Level.FINE, "lookupMetadataFromIdentifier");
         String identifierOut = getIdentifierForLookup(protocol, authority, separator, identifier);
         HashMap<String, String> metadata = new HashMap<>();
         try {
             metadata = ezidService.getMetadata(identifierOut);
-        } catch (EZIDException e) {
+        } catch (EZIDException e) {
             logger.log(Level.FINE, "None existing so we can use this identifier");
             logger.log(Level.FINE, "identifier: {0}", identifierOut);
             return metadata;
@@ -119,25 +134,15 @@ public HashMap lookupMetadataFromIdentifier(String protocol, Str
         return metadata;
     }
 
-    /**
-     * Concatenate the parts that make up a Global Identifier.
-     * @param protocol the identifier system, e.g. "doi"
-     * @param authority the namespace that the authority manages in the identifier system
-     * @param separator the string that separates authority from local identifier part
-     * @param identifier the local identifier part
-     * @return the Global Identifier, e.g. "doi:10.12345/67890"
-     */
-    public String getIdentifierForLookup(String protocol, String authority, String separator, String identifier) {
-        return protocol + ":" + authority + separator + identifier;
-    }
-
     /**
      * Modifies the EZID metadata for a Dataset
      * @param dataset the Dataset whose metadata needs to be modified
     * @param metadata the new metadata for the Dataset
     * @return the Dataset identifier, or null if the modification failed
     */
-    public String modifyIdentifier(Dataset dataset, HashMap<String, String> metadata) {
+    @Override
+    public String modifyIdentifier(Dataset dataset, HashMap<String, String> metadata) throws Exception {
+        logger.log(Level.FINE, "modifyIdentifier");
         String identifier = getIdentifierFromDataset(dataset);
         try {
             ezidService.setMetadata(identifier, metadata);
@@ -148,26 +153,28 @@ public String modifyIdentifier(Dataset dataset, HashMap metadata
             logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
             logger.log(Level.WARNING, "cause", e.getCause());
             logger.log(Level.WARNING, "message {0}", e.getMessage());
-        }
-        return null;
+            throw e;
+        }
     }
-
+
+    @Override
     public void deleteIdentifier(Dataset datasetIn) {
+        logger.log(Level.FINE, "deleteIdentifier");
         String identifier = getIdentifierFromDataset(datasetIn);
-        HashMap<String, String> doiMetadata = new HashMap<>();
+        HashMap<String, String> doiMetadata;
         try {
             doiMetadata = ezidService.getMetadata(identifier);
         } catch (EZIDException e) {
             logger.log(Level.WARNING, "get metadata failed cannot delete");
             logger.log(Level.WARNING, "String {0}", e.toString());
             logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
-            logger.log(Level.WARNING, "cause", e.getCause());
+            logger.log(Level.WARNING, "cause ", e.getCause());
             logger.log(Level.WARNING, "message {0}", e.getMessage());
             return;
         }
 
         String idStatus = doiMetadata.get("_status");
-
+
         if (idStatus.equals("reserved")) {
             logger.log(Level.INFO, "Delete status is reserved..");
             try {
@@ -187,75 +194,22 @@ public void deleteIdentifier(Dataset datasetIn) {
             HashMap<String, String> metadata = new HashMap<>();
             metadata.put("_target", "http://ezid.cdlib.org/id/" + datasetIn.getProtocol() + ":" + datasetIn.getAuthority()
                     + datasetIn.getDoiSeparator() + datasetIn.getIdentifier());
-            modifyIdentifier(datasetIn, metadata);
-        }
-    }
-
-    private HashMap<String, String> getUpdateMetadataFromDataset(Dataset datasetIn){
-        HashMap<String, String> metadata = new HashMap<>();
-
-        String authorString = datasetIn.getLatestVersion().getAuthorsStr();
-
-        if(authorString.isEmpty()) {
-            authorString = ":unav";
-        }
-
-        String producerString = dataverseService.findRootDataverse().getName() + " Dataverse";
-
-        if(producerString.isEmpty()) {
-            producerString = ":unav";
-        }
-        metadata.put("datacite.creator", authorString);
-        metadata.put("datacite.title", datasetIn.getLatestVersion().getTitle());
-        metadata.put("datacite.publisher", producerString);
-
-        return metadata;
-
-    }
-
-    public HashMap<String, String> getMetadataFromStudyForCreateIndicator(Dataset datasetIn) {
-        HashMap<String, String> metadata = new HashMap<>();
-
-        String authorString = datasetIn.getLatestVersion().getAuthorsStr();
-
-        if (authorString.isEmpty()) {
-            authorString = ":unav";
-        }
-
-        String producerString = dataverseService.findRootDataverse().getName() + " Dataverse";
-
-        if (producerString.isEmpty()) {
-            producerString = ":unav";
+            try {
+                modifyIdentifier(datasetIn, metadata);
+            } catch (Exception e) {
+                // TODO already logged, how to react here?
+            }
         }
-        metadata.put("datacite.creator", authorString);
-        metadata.put("datacite.title", datasetIn.getLatestVersion().getTitle());
-        metadata.put("datacite.publisher", producerString);
-        metadata.put("datacite.publicationyear", generateYear());
-        metadata.put("datacite.resourcetype", "Dataset");
-        metadata.put("_target", getTargetUrl(datasetIn));
-        return metadata;
-    }
-
-    public HashMap<String, String> getMetadataFromDatasetForTargetURL(Dataset datasetIn) {
-        HashMap<String, String> metadata = new HashMap<>();
-        metadata.put("_target", getTargetUrl(datasetIn));
-        return metadata;
-    }
-
-    private String getTargetUrl(Dataset datasetIn) {
-        return systemConfig.getDataverseSiteUrl() + Dataset.TARGET_URL + datasetIn.getGlobalId();
-    }
-
-    private String getIdentifierFromDataset(Dataset dataset) {
-        return dataset.getGlobalId();
     }
-
+
+    @Override
     public boolean publicizeIdentifier(Dataset dataset) {
+        logger.log(Level.FINE, "publicizeIdentifier");
         return updateIdentifierStatus(dataset, "public");
     }
-
+
     private boolean updateIdentifierStatus(Dataset dataset, String statusIn) {
+        logger.log(Level.FINE, "updateIdentifierStatus");
         String identifier = getIdentifierFromDataset(dataset);
         HashMap<String, String> metadata = getUpdateMetadataFromDataset(dataset);
         metadata.put("_status", statusIn);
@@ -273,45 +227,4 @@ private boolean updateIdentifierStatus(Dataset dataset, String statusIn) {
         }
     }
-
-    public static String generateYear()
-    {
-        StringBuffer guid = new StringBuffer();
-
-        // Create a calendar to get the date formatted properly
-        String[] ids = TimeZone.getAvailableIDs(-8 * 60 * 60 * 1000);
-        SimpleTimeZone pdt = new SimpleTimeZone(-8 * 60 * 60 * 1000, ids[0]);
-        pdt.setStartRule(Calendar.APRIL, 1, Calendar.SUNDAY, 2 * 60 * 60 * 1000);
-        pdt.setEndRule(Calendar.OCTOBER, -1, Calendar.SUNDAY, 2 * 60 * 60 * 1000);
-        Calendar calendar = new GregorianCalendar(pdt);
-        Date trialTime = new Date();
-        calendar.setTime(trialTime);
-        guid.append(calendar.get(Calendar.YEAR));
-
-        return guid.toString();
-    }
-
-    public static String generateTimeString()
-    {
-        StringBuffer guid = new StringBuffer();
-
-        // Create a calendar to get the date formatted properly
-        String[] ids = TimeZone.getAvailableIDs(-8 * 60 * 60 * 1000);
-        SimpleTimeZone pdt = new SimpleTimeZone(-8 * 60 * 60 * 1000, ids[0]);
-        pdt.setStartRule(Calendar.APRIL, 1, Calendar.SUNDAY, 2 * 60 * 60 * 1000);
-        pdt.setEndRule(Calendar.OCTOBER, -1, Calendar.SUNDAY, 2 * 60 * 60 * 1000);
-        Calendar calendar = new GregorianCalendar(pdt);
-        Date trialTime = new Date();
-        calendar.setTime(trialTime);
-        guid.append(calendar.get(Calendar.YEAR));
-        guid.append(calendar.get(Calendar.DAY_OF_YEAR));
-        guid.append(calendar.get(Calendar.HOUR_OF_DAY));
-        guid.append(calendar.get(Calendar.MINUTE));
-        guid.append(calendar.get(Calendar.SECOND));
-        guid.append(calendar.get(Calendar.MILLISECOND));
-        double random = Math.random();
-        guid.append(random);
-
-        return guid.toString();
-    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index be0d5dd5c66..d2f428a5dc1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -10,7 +10,6 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
-import edu.harvard.iq.dataverse.export.ExportService;
 import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
@@ -45,6 +44,9 @@
 import org.apache.commons.lang.RandomStringUtils;
 import org.ocpsoft.common.util.Strings;
 
+import static edu.harvard.iq.dataverse.PersistentIdRegistrationServiceBean.Provider.EZID;
+import static edu.harvard.iq.dataverse.PersistentIdRegistrationServiceBean.Protocol.doi;
+
 /**
  *
  * @author skraffmiller
@@ -176,7 +178,7 @@ public Dataset findByGlobalId(String globalId) {
         } else {
             authority = globalId.substring(index1 + 1, index2);
         }
-        if (protocol.equals("doi")) {
+        if (protocol.equals(doi.toString())) {
             index3 = globalId.indexOf(separator, index2 + 1);
             if (index3 == -1 ) {
@@ -236,7 +238,8 @@ public boolean isUniqueIdentifier(String userIdentifier, String protocol, String
         boolean u = em.createQuery(query).getResultList().size() == 0;
         String nonNullDefaultIfKeyNotFound = "";
         String doiProvider = settingsService.getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
-        if (doiProvider.equals("EZID")) {
+        if (protocol.equals(doi.toString()) && doiProvider.equals(EZID.toString())) {
+            // TODO would need CommandContext to use PersistentIdRegistrationServiceBean.getBean, then replace condition above with something like idServiceBean.registerWhenPublished
             if (!doiEZIdServiceBean.lookupMetadataFromIdentifier(protocol, authority, separator, userIdentifier).isEmpty()) {
                 u = false;
             }
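The findByGlobalId hunk above slices a global identifier of the shape protocol:authority/separator/identifier. A simplified, standalone sketch of that shape — the sample value and the lastIndexOf shortcut are illustrative, not the bean's exact index arithmetic:

    // Sketch: the global-id layout parsed by findByGlobalId, e.g. "doi:10.5072/FK2/XYZ123".
    public class GlobalIdParseSketch {
        public static void main(String[] args) {
            String globalId = "doi:10.5072/FK2/XYZ123";            // illustrative value
            int colon = globalId.indexOf(':');
            String protocol = globalId.substring(0, colon);        // "doi"
            int lastSep = globalId.lastIndexOf('/');
            String authority = globalId.substring(colon + 1, lastSep); // "10.5072/FK2"
            String identifier = globalId.substring(lastSep + 1);       // "XYZ123"
            System.out.println(protocol + " | " + authority + " | " + identifier);
        }
    }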
diff --git a/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java
index eeb815f09d0..87ecbede648 100644
--- a/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java
@@ -47,8 +47,8 @@
 import net.handle.hdllib.ModifyValueRequest;
 import net.handle.hdllib.PublicKeyAuthenticationInfo;
 import net.handle.hdllib.ResolutionRequest;
-import net.handle.hdllib.ResolutionResponse;
 import net.handle.hdllib.Util;
+import org.apache.commons.lang.NotImplementedException;
 
 /**
  *
@@ -60,20 +60,26 @@
  * the modifyRegistration datasets API sub-command.
 */
 @Stateless
-public class HandlenetServiceBean {
+public class HandlenetServiceBean extends AbstractPersistentIdRegistrationServiceBean {
+
     @EJB
     DataverseServiceBean dataverseService;
     @EJB
     SettingsServiceBean settingsService;
-    private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.HandlenetServiceBean");
-
-    private static final String HANDLE_PROTOCOL_TAG = "hdl";
+    private static final Logger logger = Logger.getLogger(HandlenetServiceBean.class.getCanonicalName());
 
     public HandlenetServiceBean() {
+        logger.log(Level.FINE, "Constructor");
     }
-
-    public void reRegisterHandle(Dataset dataset) {
-        if (!HANDLE_PROTOCOL_TAG.equals(dataset.getProtocol())) {
+
+    @Override
+    public boolean registerWhenPublished() {
+        return true; // TODO current value plays it safe; can we loosen up?
+    }
+
+    private void reRegisterHandle(Dataset dataset) {
+        logger.log(Level.FINE, "reRegisterHandle");
+        if (!Protocol.hdl.toString().equals(dataset.getProtocol())) {
             logger.warning("reRegisterHandle called on a dataset with the non-handle global id: " + dataset.getId());
         }
@@ -131,7 +137,8 @@ public void reRegisterHandle(Dataset dataset) {
         }
     }
 
-    public void registerNewHandle(Dataset dataset) {
+    private Throwable registerNewHandle(Dataset dataset) {
+        logger.log(Level.FINE, "registerNewHandle");
         String handlePrefix = dataset.getAuthority();
         String handle = getDatasetHandle(dataset);
         String datasetUrl = getRegistrationUrl(dataset);
@@ -143,8 +150,6 @@ public void registerNewHandle(Dataset dataset) {
         PublicKeyAuthenticationInfo auth = getAuthInfo(handlePrefix);
         HandleResolver resolver = new HandleResolver();
 
-        int index = 300;
-
         try {
             AdminRecord admin = new AdminRecord(authHandle.getBytes("UTF8"), 300,
@@ -168,15 +173,24 @@ public void registerNewHandle(Dataset dataset) {
             AbstractResponse response = resolver.processRequest(req);
             if (response.responseCode == AbstractMessage.RC_SUCCESS) {
                 logger.info("Success! Response: \n" + response);
+                return null;
             } else {
+                logger.log(Level.WARNING, "registerNewHandle failed");
                 logger.warning("Error response: \n" + response);
+                return new Exception("registerNewHandle failed: " + response);
             }
         } catch (Throwable t) {
-            logger.warning("\nError (caught exception): " + t);
+            logger.log(Level.WARNING, "registerNewHandle failed");
+            logger.log(Level.WARNING, "String {0}", t.toString());
+            logger.log(Level.WARNING, "localized message {0}", t.getLocalizedMessage());
+            logger.log(Level.WARNING, "cause", t.getCause());
+            logger.log(Level.WARNING, "message {0}", t.getMessage());
+            return t;
         }
     }
 
-    public boolean isHandleRegistered(String handle){
+    private boolean isHandleRegistered(String handle){
+        logger.log(Level.FINE, "isHandleRegistered");
         boolean handleRegistered = false;
         ResolutionRequest req = buildResolutionRequest(handle);
         AbstractResponse response = null;
@@ -195,6 +209,7 @@ public boolean isHandleRegistered(String handle){
     }
 
     private ResolutionRequest buildResolutionRequest(final String handle) {
+        logger.log(Level.FINE, "buildResolutionRequest");
         String handlePrefix = handle.substring(0, handle.indexOf("/"));
 
         PublicKeyAuthenticationInfo auth = getAuthInfo(handlePrefix);
@@ -213,6 +228,7 @@ private ResolutionRequest buildResolutionRequest(final String handle) {
     }
 
     private PublicKeyAuthenticationInfo getAuthInfo(String handlePrefix) {
+        logger.log(Level.FINE, "getAuthInfo");
         byte[] key = null;
         String adminCredFile = System.getProperty("dataverse.handlenet.admcredfile");
 
@@ -225,6 +241,7 @@ private PublicKeyAuthenticationInfo getAuthInfo(String handlePrefix) {
         return auth;
     }
     private String getRegistrationUrl(Dataset dataset) {
+        logger.log(Level.FINE, "getRegistrationUrl");
         String siteUrl = getSiteUrl();
 
         //String targetUrl = siteUrl + "/dataset.xhtml?persistentId=hdl:" + dataset.getAuthority()
@@ -233,7 +250,8 @@ private String getRegistrationUrl(Dataset dataset) {
         return targetUrl;
     }
 
-    public String getSiteUrl() {
+    private String getSiteUrl() {
+        logger.log(Level.FINE, "getSiteUrl");
         String hostUrl = System.getProperty("dataverse.siteUrl");
         if (hostUrl != null && !"".equals(hostUrl)) {
             return hostUrl;
@@ -251,6 +269,7 @@ public String getSiteUrl() {
     }
 
     private byte[] readKey(final String file) {
+        logger.log(Level.FINE, "readKey");
         byte[] key = null;
         try {
             File f = new File(file);
@@ -267,6 +286,7 @@ private byte[] readKey(final String file) {
     }
 
     private PrivateKey readPrivKey(byte[] key, final String file) {
+        logger.log(Level.FINE, "readPrivKey");
         PrivateKey privkey = null;
 
         String secret = System.getProperty("dataverse.handlenet.admprivphrase");
@@ -297,8 +317,93 @@ private String getHandleAuthority(Dataset dataset){
     }
 
     private String getHandleAuthority(String handlePrefix) {
+        logger.log(Level.FINE, "getHandleAuthority");
         return "0.NA/" + handlePrefix;
     }
+
+    @Override
+    public boolean alreadyExists(Dataset dataset) throws Exception {
+        String handle = getDatasetHandle(dataset);
+        return isHandleRegistered(handle);
+    }
+
+    @Override
+    public String createIdentifier(Dataset dataset) throws Throwable {
+        Throwable result = registerNewHandle(dataset);
+        if (result != null)
+            throw result;
+        // TODO get exceptions from under the carpet
+        return getDatasetHandle(dataset);
+    }
+
+    @Override
+    public HashMap<String, String> getIdentifierMetadata(Dataset dataset) {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public HashMap<String, String> lookupMetadataFromIdentifier(String protocol, String authority, String separator, String identifier) {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public String modifyIdentifier(Dataset dataset, HashMap<String, String> metadata) throws Exception {
+        logger.log(Level.FINE, "modifyIdentifier");
+        reRegisterHandle(dataset);
+        return getIdentifierFromDataset(dataset);
+    }
+
+    @Override
+    public void deleteIdentifier(Dataset datasetIn) throws Exception {
+        String handle = getDatasetHandle(datasetIn);
+        String authHandle = getAuthHandle(datasetIn);
+
+        String adminCredFile = System.getProperty("dataverse.handlenet.admcredfile");
+
+        byte[] key = readKey(adminCredFile);
+        PrivateKey privkey = readPrivKey(key, adminCredFile);
+
+        HandleResolver resolver = new HandleResolver();
+        resolver.setSessionTracker(new ClientSessionTracker());
+
+        PublicKeyAuthenticationInfo auth =
+                new PublicKeyAuthenticationInfo(Util.encodeString(authHandle), 300, privkey);
+
+        DeleteHandleRequest req =
+                new DeleteHandleRequest(Util.encodeString(handle), auth);
+        AbstractResponse response = null;
+        try {
+            response = resolver.processRequest(req);
+        } catch (HandleException ex) {
+            ex.printStackTrace();
+        }
+        if (response == null || response.responseCode != AbstractMessage.RC_SUCCESS) {
+            logger.fine("error deleting '" + handle + "': " + response);
+        } else {
+            logger.fine("deleted " + handle);
+        }
+    }
+
+    @Override
+    public boolean publicizeIdentifier(Dataset dataset) {
+        logger.log(Level.FINE, "publicizeIdentifier");
+        return updateIdentifierStatus(dataset, "public");
+    }
+
+    private boolean updateIdentifierStatus(Dataset dataset, String statusIn) {
+        logger.log(Level.FINE, "updateIdentifierStatus");
+        String identifier = getIdentifierFromDataset(dataset);
+        HashMap<String, String> metadata = getUpdateMetadataFromDataset(dataset);
+        metadata.put("_status", statusIn);
+        metadata.put("_target", getTargetUrl(dataset));
+        // TODO drop getting identifier and metadata if indeed not required
+        return null == registerNewHandle(dataset); // Exceptions have been logged
+    }
+
+    private String getAuthHandle(Dataset datasetIn) {
+        // TODO hack: GNRSServiceBean retrieved this from vdcNetworkService
+        return "0.NA/" + datasetIn.getAuthority();
+    }
 }
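The Handle bean above authenticates administrative operations with the private admin key and the "0.NA/" + prefix admin handle. A sketch of that setup using the same hdllib calls the bean relies on — the file path, prefix, and the requiresSecretKey/decrypt/getPrivateKeyFromBytes helpers are assumptions about the handle client library, and readPrivKey's internals are not fully shown in this patch:

    import java.io.File;
    import java.nio.file.Files;
    import java.security.PrivateKey;
    import net.handle.hdllib.PublicKeyAuthenticationInfo;
    import net.handle.hdllib.Util;

    // Sketch: load the admin key, decrypt it with the passphrase if needed,
    // and bind it to the "0.NA/<prefix>" admin handle for authenticated requests.
    public class HandleAuthSketch {
        public static void main(String[] args) throws Exception {
            String prefix = "20.500.12345"; // illustrative handle prefix
            byte[] key = Files.readAllBytes(new File("/srv/handle/svr_1/admpriv.bin").toPath());
            String passphrase = System.getProperty("dataverse.handlenet.admprivphrase");
            if (Util.requiresSecretKey(key) && passphrase != null) {
                key = Util.decrypt(key, Util.encodeString(passphrase));
            }
            PrivateKey privkey = Util.getPrivateKeyFromBytes(key, 0);
            PublicKeyAuthenticationInfo auth =
                    new PublicKeyAuthenticationInfo(Util.encodeString("0.NA/" + prefix), 300, privkey);
            System.out.println("auth handle: 0.NA/" + prefix);
        }
    }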
diff --git a/src/main/java/edu/harvard/iq/dataverse/PersistentIdRegistrationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PersistentIdRegistrationServiceBean.java
new file mode 100644
index 00000000000..b8e5c31d8dc
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/PersistentIdRegistrationServiceBean.java
@@ -0,0 +1,98 @@
+package edu.harvard.iq.dataverse;
+
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+
+import java.util.HashMap;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+
+public interface PersistentIdRegistrationServiceBean {
+
+    Logger logger = Logger.getLogger(PersistentIdRegistrationServiceBean.class.getCanonicalName());
+
+    enum Provider {
+        EZID {
+            public PersistentIdRegistrationServiceBean getBean(CommandContext ctxt) {
+                return ctxt.doiEZId();
+            }
+        },
+        DataCite {
+            public PersistentIdRegistrationServiceBean getBean(CommandContext ctxt) {
+                return ctxt.doiDataCite();
+            }
+        };
+
+        public abstract PersistentIdRegistrationServiceBean getBean(CommandContext ctxt);
+    }
+
+    enum Protocol {
+        hdl {
+            public PersistentIdRegistrationServiceBean getBean(Provider provider, CommandContext ctxt) {
+                return ctxt.handleNet();
+            }
+        },
+        doi {
+            public PersistentIdRegistrationServiceBean getBean(Provider provider, CommandContext ctxt) {
+                return provider.getBean(ctxt);
+            }
+        };
+
+        public abstract PersistentIdRegistrationServiceBean getBean(Provider provider, CommandContext ctxt);
+    }
+
+    boolean alreadyExists(Dataset dataset) throws Exception;
+
+    boolean registerWhenPublished();
+
+    String createIdentifier(Dataset dataset) throws Throwable;
+
+    HashMap<String, String> getIdentifierMetadata(Dataset dataset);
+
+    HashMap<String, String> lookupMetadataFromIdentifier(String protocol, String authority, String separator, String identifier);
+
+    /**
+     * Concatenate the parts that make up a Global Identifier.
+     * @param protocol the identifier system, e.g. "doi"
+     * @param authority the namespace that the authority manages in the identifier system
+     * @param separator the string that separates authority from local identifier part
+     * @param identifier the local identifier part
+     * @return the Global Identifier, e.g. "doi:10.12345/67890"
+     */
+    String getIdentifierForLookup(String protocol, String authority, String separator, String identifier);
+
+    String modifyIdentifier(Dataset dataset, HashMap<String, String> metadata) throws Exception;
+
+    void deleteIdentifier(Dataset datasetIn) throws Exception;
+
+    HashMap<String, String> getMetadataFromStudyForCreateIndicator(Dataset datasetIn);
+
+    HashMap<String, String> getMetadataFromDatasetForTargetURL(Dataset datasetIn);
+
+    String getIdentifierFromDataset(Dataset dataset);
+
+    boolean publicizeIdentifier(Dataset studyIn);
+
+    void postDeleteCleanup(final Dataset doomed);
+
+    static PersistentIdRegistrationServiceBean getBean(String protocolString, CommandContext ctxt) {
+        logger.log(Level.FINE, "getting bean, protocol=" + protocolString);
+        String nonNullDefaultIfKeyNotFound = "";
+        String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
+        try {
+            return Protocol.valueOf(protocolString).getBean(Provider.valueOf(doiProvider), ctxt);
+        } catch (Exception e) {
+            logger.log(Level.SEVERE, "Unknown doiProvider and/or protocol: " + doiProvider + " " + protocolString);
+            return null;
+        }
+    }
+
+    static PersistentIdRegistrationServiceBean getBean(CommandContext ctxt) {
+        logger.log(Level.FINE, "getting bean with protocol from context");
+
+        String nonNullDefaultIfKeyNotFound = "";
+        String protocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound);
+        return getBean(protocol, ctxt);
+    }
+}
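The interface above routes resolution through two levels: the Protocol enum ("hdl" goes straight to the Handle bean, "doi" defers to the Provider enum), and the Provider enum picks EZID or DataCite. A sketch of how the commands below consume it, using only the patch's own types — the helper method name is illustrative:

    // Illustrative helper, not part of the patch: resolve the bean for a
    // dataset's protocol and reserve an identifier now unless the provider
    // registers only at publish time (DataCite and Handle return true there).
    static void reserveIfNeeded(Dataset theDataset, CommandContext ctxt) {
        PersistentIdRegistrationServiceBean idService =
                PersistentIdRegistrationServiceBean.getBean(theDataset.getProtocol(), ctxt);
        if (idService == null || idService.registerWhenPublished()) {
            return; // nothing to reserve at creation time
        }
        try {
            idService.createIdentifier(theDataset); // EZID reserves immediately
        } catch (Throwable t) {
            // the beans already log details; callers decide if registration is required
        }
    }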
"doi:10.12345/67890" + */ + String getIdentifierForLookup(String protocol, String authority, String separator, String identifier); + + String modifyIdentifier(Dataset dataset, HashMap metadata) throws Exception; + + void deleteIdentifier(Dataset datasetIn) throws Exception; + + HashMap getMetadataFromStudyForCreateIndicator(Dataset datasetIn); + + HashMap getMetadataFromDatasetForTargetURL(Dataset datasetIn); + + String getIdentifierFromDataset(Dataset dataset); + + boolean publicizeIdentifier(Dataset studyIn); + + void postDeleteCleanup(final Dataset doomed); + + static PersistentIdRegistrationServiceBean getBean(String protocolString, CommandContext ctxt) { + logger.log(Level.FINE,"getting bean, protocol=" + protocolString); + String nonNullDefaultIfKeyNotFound = ""; + String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound); + try { + return Protocol.valueOf(protocolString).getBean(Provider.valueOf(doiProvider), ctxt); + } catch (Exception e) { + logger.log(Level.SEVERE,"Unknown doiProvider and/or protocol: " + doiProvider + " " + protocolString); + return null; + } + } + + static PersistentIdRegistrationServiceBean getBean(CommandContext ctxt) { + logger.log(Level.FINE,"getting bean with protocol from context"); + + String nonNullDefaultIfKeyNotFound = ""; + String protocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound); + return getBean(protocol, ctxt); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index f0af8490c3d..d496072ae85 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -93,9 +93,6 @@ public class Datasets extends AbstractApiBean { @EJB DataverseServiceBean dataverseService; - - @EJB - DOIEZIdServiceBean doiEZIdServiceBean; @EJB DDIExportServiceBean ddiExportService; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java index 25d2231708a..4a3116eb22f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java @@ -1,18 +1,11 @@ package edu.harvard.iq.dataverse.engine.command.impl; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetVersionUser; -import edu.harvard.iq.dataverse.DatasetField; -import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; -import edu.harvard.iq.dataverse.RoleAssignment; -import edu.harvard.iq.dataverse.Template; import edu.harvard.iq.dataverse.api.imports.ImportUtil; import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportType; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -26,7 +19,6 @@ import java.util.Iterator; import java.util.Objects; import java.util.Set; -import java.util.concurrent.Future; import 
java.util.logging.Level; import java.util.logging.Logger; import javax.validation.ConstraintViolation; @@ -82,7 +74,7 @@ public CreateDatasetCommand(Dataset theDataset, DataverseRequest aRequest, boole @Override public Dataset execute(CommandContext ctxt) throws CommandException { SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd-hh.mm.ss"); - + if ( (importType != ImportType.MIGRATION && importType != ImportType.HARVEST) && !ctxt.datasets().isUniqueIdentifier(theDataset.getIdentifier(), theDataset.getProtocol(), theDataset.getAuthority(), theDataset.getDoiSeparator()) ) { throw new IllegalCommandException(String.format("Dataset with identifier '%s', protocol '%s' and authority '%s' already exists", theDataset.getIdentifier(), theDataset.getProtocol(), theDataset.getAuthority()), @@ -107,7 +99,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { theDataset.setCreator((AuthenticatedUser) getRequest().getUser()); theDataset.setCreateDate(new Timestamp(new Date().getTime())); - + Iterator dsfIt = dsv.getDatasetFields().iterator(); while (dsfIt.hasNext()) { if (dsfIt.next().removeBlankDatasetFieldValues()) { @@ -138,29 +130,23 @@ public Dataset execute(CommandContext ctxt) throws CommandException { if (theDataset.getIdentifier()==null) { theDataset.setIdentifier(ctxt.datasets().generateIdentifierSequence(theDataset.getProtocol(), theDataset.getAuthority(), theDataset.getDoiSeparator())); } + logger.log(Level.FINE,"doiProvider={0} protocol={1} importType={2} GlobalIdCreateTime=={3}", new Object[]{doiProvider, protocol, importType, theDataset.getGlobalIdCreateTime()}); // Attempt the registration if importing dataset through the API, or the app (but not harvest or migrate) if ((importType == null || importType.equals(ImportType.NEW)) && theDataset.getGlobalIdCreateTime() == null) { - if (protocol.equals("doi")) { String doiRetString = ""; - if (doiProvider.equals("EZID")) { - doiRetString = ctxt.doiEZId().createIdentifier(theDataset); - } - if (doiProvider.equals("DataCite")) { - try{ - doiRetString = ctxt.doiDataCite().createIdentifier(theDataset); - } catch (Exception e){ - logger.log(Level.WARNING, "Exception while creating Identifier:" + e.getMessage(), e); - } + PersistentIdRegistrationServiceBean persistentIdRegistrationServiceBean = PersistentIdRegistrationServiceBean.getBean(ctxt); + try{ + logger.log(Level.FINE,"creating identifier"); + doiRetString = persistentIdRegistrationServiceBean.createIdentifier(theDataset); + } catch (Throwable e){ + logger.log(Level.WARNING, "Exception while creating Identifier: " + e.getMessage(), e); } // Check return value to make sure registration succeeded - if (doiProvider.equals("EZID") && doiRetString.contains(theDataset.getIdentifier())) { + if (!persistentIdRegistrationServiceBean.registerWhenPublished() && doiRetString.contains(theDataset.getIdentifier())) { theDataset.setGlobalIdCreateTime(createDate); } - - } - } else // If harvest or migrate, and this is a released dataset, we don't need to register, // so set the globalIdCreateTime to now if (theDataset.getLatestVersion().getVersionState().equals(VersionState.RELEASED)) { @@ -170,9 +156,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException { if (registrationRequired && theDataset.getGlobalIdCreateTime() == null) { throw new IllegalCommandException("Dataset could not be created. 
Registration failed", this); } - logger.log(Level.FINE, "after doi {0}", formatter.format(new Date().getTime())); + logger.log(Level.FINE, "after doi {0}", formatter.format(new Date().getTime())); Dataset savedDataset = ctxt.em().merge(theDataset); - logger.log(Level.FINE, "after db update {0}", formatter.format(new Date().getTime())); + logger.log(Level.FINE, "after db update {0}", formatter.format(new Date().getTime())); // set the role to be default contributor role for its dataverse if (importType==null || importType.equals(ImportType.NEW)) { String privateUrlToken = null; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java index 402cdc0b4c9..cd945326da6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.PersistentIdRegistrationServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; @@ -15,12 +16,11 @@ import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.export.ExportException; import edu.harvard.iq.dataverse.export.ExportService; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import java.util.ResourceBundle; -import java.util.logging.Level; +import java.util.logging.Logger; /** * @@ -29,6 +29,7 @@ @RequiredPermissions(Permission.PublishDataset) public class DeaccessionDatasetVersionCommand extends AbstractCommand { + private static final Logger logger = Logger.getLogger(DeaccessionDatasetVersionCommand.class.getCanonicalName()); final DatasetVersion theVersion; final boolean deleteDOIIdentifier; @@ -45,51 +46,48 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException { Dataset ds = theVersion.getDataset(); theVersion.setVersionState(DatasetVersion.VersionState.DEACCESSIONED); - + + logger.fine("deleteDOIIdentifier=" + deleteDOIIdentifier); if (deleteDOIIdentifier) { String nonNullDefaultIfKeyNotFound = ""; + String protocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound); + PersistentIdRegistrationServiceBean persistentIdRegistrationServiceBean = PersistentIdRegistrationServiceBean.getBean(ctxt); - String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound); - - if (doiProvider.equals("EZID")) { - ctxt.doiEZId().deleteIdentifier(ds); - } - if (doiProvider.equals("DataCite")) { - try { - ctxt.doiDataCite().deleteIdentifier(ds); - } catch (Exception e) { - if (e.toString().contains("Internal Server Error")) { - throw new CommandException(ResourceBundle.getBundle("Bundle").getString("dataset.publish.error.datacite"), this); - } - throw new CommandException(ResourceBundle.getBundle("Bundle").getString("dataset.delete.error.datacite"), this); + logger.fine("protocol=" + protocol); + try { + persistentIdRegistrationServiceBean.deleteIdentifier(ds); + } catch 
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java
index 402cdc0b4c9..cd945326da6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java
@@ -8,6 +8,7 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.PersistentIdRegistrationServiceBean;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
@@ -15,12 +16,11 @@
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.export.ExportException;
 import edu.harvard.iq.dataverse.export.ExportService;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.util.ResourceBundle;
-import java.util.logging.Level;
+import java.util.logging.Logger;
 
 /**
  *
@@ -29,6 +29,7 @@
 @RequiredPermissions(Permission.PublishDataset)
 public class DeaccessionDatasetVersionCommand extends AbstractCommand {
 
+    private static final Logger logger = Logger.getLogger(DeaccessionDatasetVersionCommand.class.getCanonicalName());
     final DatasetVersion theVersion;
     final boolean deleteDOIIdentifier;
@@ -45,51 +46,48 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
         Dataset ds = theVersion.getDataset();
 
         theVersion.setVersionState(DatasetVersion.VersionState.DEACCESSIONED);
-        
+
+        logger.fine("deleteDOIIdentifier=" + deleteDOIIdentifier);
         if (deleteDOIIdentifier) {
             String nonNullDefaultIfKeyNotFound = "";
+            String protocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound);
+            PersistentIdRegistrationServiceBean persistentIdRegistrationServiceBean = PersistentIdRegistrationServiceBean.getBean(ctxt);
-            String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
-
-            if (doiProvider.equals("EZID")) {
-                ctxt.doiEZId().deleteIdentifier(ds);
-            }
-            if (doiProvider.equals("DataCite")) {
-                try {
-                    ctxt.doiDataCite().deleteIdentifier(ds);
-                } catch (Exception e) {
-                    if (e.toString().contains("Internal Server Error")) {
-                        throw new CommandException(ResourceBundle.getBundle("Bundle").getString("dataset.publish.error.datacite"), this);
-                    }
-                    throw new CommandException(ResourceBundle.getBundle("Bundle").getString("dataset.delete.error.datacite"), this);
+            logger.fine("protocol=" + protocol);
+            try {
+                persistentIdRegistrationServiceBean.deleteIdentifier(ds);
+            } catch (Exception e) {
+                if (e.toString().contains("Internal Server Error")) {
+                    throw new CommandException(ResourceBundle.getBundle("Bundle").getString("dataset.publish.error.datacite"), this);
                 }
+                throw new CommandException(ResourceBundle.getBundle("Bundle").getString("dataset.delete.error.datacite"), this);
             }
-        }
+        }
 
         DatasetVersion managed = ctxt.em().merge(theVersion);
         boolean doNormalSolrDocCleanUp = true;
         ctxt.index().indexDataset(managed.getDataset(), doNormalSolrDocCleanUp);
 
-        // if there is still another released version of this dataset,
-        // we want to re-export it :
-        
+        // if there is still another released version of this dataset,
+        // we want to re-export it :
+
         ExportService instance = ExportService.getInstance();
-        
+
         if (managed.getDataset().getReleasedVersion() != null) {
             try {
                 instance.exportAllFormats(managed.getDataset());
             } catch (ExportException ex) {
-                // Something went wrong!
-                // But we're not going to treat it as a fatal condition.
+                // Something went wrong!
+                // But we're not going to treat it as a fatal condition.
             }
         } else {
-            // otherwise, we need to wipe clean the exports we may have cached:
+            // otherwise, we need to wipe clean the exports we may have cached:
            instance.clearAllCachedFormats(managed.getDataset());
         }
 
         // And save the dataset, to get the "last exported" timestamp right:
-        
+
         Dataset managedDs = ctxt.em().merge(managed.getDataset());
-        
+
         return managed;
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java
index 1f27ba0a199..524dde8b887 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java
@@ -3,6 +3,7 @@
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.PersistentIdRegistrationServiceBean;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.RoleAssignment;
@@ -79,13 +80,11 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
         // ROLES
         for (DataverseRole ra : ctxt.roles().findByOwnerId(doomed.getId())) {
             ctxt.em().remove(ra);
-        }
-
-        //Register Cache
-        if(ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, "").equals("DataCite")){
-            ctxt.doiDataCite().deleteRecordFromCache(doomed);
         }
 
+        //Register Cache
+        PersistentIdRegistrationServiceBean.getBean(ctxt).postDeleteCleanup(doomed);
+
         Dataverse toReIndex = managedDoomed.getOwner();
 
         // dataset
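The DestroyDatasetCommand change works because cache cleanup is now the provider's own concern: postDeleteCleanup() replaces the DataCite-specific guard. One plausible way that responsibility could be distributed, assuming only the DataCite bean keeps a local record cache (the class name and bodies below are illustrative, not from the patch):

    import edu.harvard.iq.dataverse.Dataset;

    // Hypothetical base class: providers with no local cache inherit a no-op,
    // so DestroyDatasetCommand no longer needs a provider check.
    public abstract class NoLocalCacheIdServiceBean implements PersistentIdRegistrationServiceBean {
        @Override
        public void postDeleteCleanup(Dataset doomed) {
            // nothing cached on our side for this provider
        }
    }

The DataCite bean would presumably override it with the call the command used to make directly, i.e. deleteRecordFromCache(doomed) from the removed lines above.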
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
index 43d24095396..1fe6301dee0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
@@ -5,15 +5,7 @@
  */
 package edu.harvard.iq.dataverse.engine.command.impl;
 
-import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetField;
-import edu.harvard.iq.dataverse.DatasetFieldConstant;
-import edu.harvard.iq.dataverse.DatasetVersionUser;
-import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.RoleAssignment;
-import edu.harvard.iq.dataverse.UserNotification;
+import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
@@ -27,20 +19,13 @@
 import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
 import edu.harvard.iq.dataverse.search.IndexResponse;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.jsonAsDatasetDto;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
+
 import java.sql.Timestamp;
 import java.util.Date;
 import java.util.List;
 import java.util.ResourceBundle;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.JsonObjectBuilder;
 
 /**
  *
@@ -62,12 +47,14 @@ public class PublishDatasetCommand extends AbstractCommand {
      */
     public PublishDatasetCommand(Dataset datasetIn, DataverseRequest aRequest, boolean minor) {
         super(aRequest, datasetIn);
+        logger.log(Level.FINE,"Constructor");
         minorRelease = minor;
         theDataset = datasetIn;
     }
 
     @Override
     public Dataset execute(CommandContext ctxt) throws CommandException {
+        logger.log(Level.FINE,"execute");
         if (!theDataset.getOwner().isReleased()) {
             throw new IllegalCommandException("This dataset may not be published because its host dataverse (" + theDataset.getOwner().getAlias() + ") has not been published.", this);
@@ -85,47 +72,29 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         String protocol = theDataset.getProtocol();
         String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
         String authority = theDataset.getAuthority();
+        PersistentIdRegistrationServiceBean persistentIdRegistrationServiceBean = PersistentIdRegistrationServiceBean.getBean(protocol, ctxt);
+        logger.log(Level.FINE,"doiProvider={0} protocol={1} GlobalIdCreateTime=={2}", new Object[]{doiProvider, protocol, theDataset.getGlobalIdCreateTime()});
         if (theDataset.getGlobalIdCreateTime() == null) {
-            if (protocol.equals("doi")
-                    && (doiProvider.equals("EZID") || doiProvider.equals("DataCite"))) {
-                String doiRetString = "";
-                if (doiProvider.equals("EZID")) {
-                    doiRetString = ctxt.doiEZId().createIdentifier(theDataset);
-                    if (doiRetString.contains(theDataset.getIdentifier())) {
+            if (persistentIdRegistrationServiceBean != null) {
+                try {
+                    if (!persistentIdRegistrationServiceBean.alreadyExists(theDataset)) {
+                        persistentIdRegistrationServiceBean.createIdentifier(theDataset);
                         theDataset.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
-                    } else if (doiRetString.contains("identifier already exists")) {
-                        theDataset.setIdentifier(ctxt.datasets().generateIdentifierSequence(protocol, authority, theDataset.getDoiSeparator()));
-                        doiRetString = ctxt.doiEZId().createIdentifier(theDataset);
-                        if (!doiRetString.contains(theDataset.getIdentifier())) {
-                            throw new IllegalCommandException("This dataset may not be published because its identifier is already in use by another dataset. Please contact Dataverse Support for assistance.", this);
-                        } else {
-                            theDataset.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
-                        }
                     } else {
-                        throw new IllegalCommandException("This dataset may not be published because it has not been registered. Please contact Dataverse Support for assistance.", this);
-                    }
-                }
-
-                if (doiProvider.equals("DataCite")) {
-                    try {
-                        if (!ctxt.doiDataCite().alreadyExists(theDataset)) {
-                            ctxt.doiDataCite().createIdentifier(theDataset);
+                        theDataset.setIdentifier(ctxt.datasets().generateIdentifierSequence(protocol, authority, theDataset.getDoiSeparator()));
+                        if (!persistentIdRegistrationServiceBean.alreadyExists(theDataset)) {
+                            persistentIdRegistrationServiceBean.createIdentifier(theDataset);
                             theDataset.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
                         } else {
-                            theDataset.setIdentifier(ctxt.datasets().generateIdentifierSequence(protocol, authority, theDataset.getDoiSeparator()));
-                            if (!ctxt.doiDataCite().alreadyExists(theDataset)) {
-                                ctxt.doiDataCite().createIdentifier(theDataset);
-                                theDataset.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
-                            } else {
-                                throw new IllegalCommandException("This dataset may not be published because its identifier is already in use by another dataset. Please contact Dataverse Support for assistance.", this);
-                            }
+                            throw new IllegalCommandException("This dataset may not be published because its identifier is already in use by another dataset.", this);
                         }
-                    } catch (Exception e) {
-                        throw new CommandException(ResourceBundle.getBundle("Bundle").getString("dataset.publish.error.datacite"), this);
                     }
+                } catch (Throwable e) {
+                    // TODO add a variant for EZId
+                    throw new CommandException(ResourceBundle.getBundle("Bundle").getString("dataset.publish.error.datacite"), this);
                 }
             } else {
-                throw new IllegalCommandException("This dataset may not be published because its DOI provider is not supported. Please contact Dataverse Support for assistance.", this);
+                throw new IllegalCommandException("This dataset may not be published because its id registry service is not supported.", this);
             }
         }
@@ -243,23 +212,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
             ctxt.em().merge(datasetDataverseUser);
         }
 
-        if (protocol.equals("doi")
-                && doiProvider.equals("EZID")) {
-            ctxt.doiEZId().publicizeIdentifier(savedDataset);
-        }
-        if (protocol.equals("doi")
-                && doiProvider.equals("DataCite")) {
-            try {
-                ctxt.doiDataCite().publicizeIdentifier(savedDataset);
-            } catch (IOException io) {
+        if (persistentIdRegistrationServiceBean != null && !persistentIdRegistrationServiceBean.registerWhenPublished())
+            if (!persistentIdRegistrationServiceBean.publicizeIdentifier(savedDataset))
                 throw new CommandException(ResourceBundle.getBundle("Bundle").getString("dataset.publish.error.datacite"), this);
-            } catch (Exception e) {
-                if (e.toString().contains("Internal Server Error")) {
-                    throw new CommandException(ResourceBundle.getBundle("Bundle").getString("dataset.publish.error.datacite"), this);
-                }
-                throw new CommandException(ResourceBundle.getBundle("Bundle").getString("dataset.publish.error.datacite"), this);
-            }
-        }
 
         PrivateUrl privateUrl = ctxt.engine().submit(new GetPrivateUrlCommand(getRequest(), savedDataset));
         if (privateUrl != null) {
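The publish-time branch above inlines the same alreadyExists/createIdentifier pair twice, once before and once after minting a replacement identifier. Written out as a standalone helper, the control flow reduces to a single retry, which may be easier to review against the old EZID/DataCite variants (hypothetical method; IllegalStateException stands in for the command exceptions):

    import java.sql.Timestamp;
    import java.util.Date;

    // Equivalent control flow to the new PublishDatasetCommand block:
    // at most one regeneration attempt on an identifier collision.
    private void registerWithOneRetry(PersistentIdRegistrationServiceBean bean,
                                      CommandContext ctxt, Dataset ds,
                                      String protocol, String authority) throws Exception {
        if (bean.alreadyExists(ds)) {
            // collision: mint a fresh identifier once, exactly as the patch does
            ds.setIdentifier(ctxt.datasets().generateIdentifierSequence(
                    protocol, authority, ds.getDoiSeparator()));
            if (bean.alreadyExists(ds)) {
                throw new IllegalStateException(
                        "identifier already in use by another dataset");
            }
        }
        bean.createIdentifier(ds);
        ds.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
    }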
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetCommand.java
index 51e595b7533..9704ce5ccae 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetCommand.java
@@ -5,12 +5,7 @@
  */
 package edu.harvard.iq.dataverse.engine.command.impl;
 
-import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.DataFileCategory;
-import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetVersionUser;
-import edu.harvard.iq.dataverse.DatasetField;
-import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
@@ -27,6 +22,7 @@
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.Future;
+import java.util.logging.Level;
 import java.util.logging.Logger;
 import javax.validation.ConstraintViolation;
@@ -180,25 +176,35 @@ public Dataset save(CommandContext ctxt) throws CommandException {
         String nonNullDefaultIfKeyNotFound = "";
         String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
 
-        if (theDataset.getProtocol().equals("doi")
-                && doiProvider.equals("EZID") && theDataset.getGlobalIdCreateTime() == null) {
-            String doiRetString = ctxt.doiEZId().createIdentifier(theDataset);
-            if (doiRetString.contains(theDataset.getIdentifier())) {
-                theDataset.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
-            } else {
-                //try again if identifier exists
-                if (doiRetString.contains("identifier already exists")) {
-                    theDataset.setIdentifier(ctxt.datasets().generateIdentifierSequence(theDataset.getProtocol(), theDataset.getAuthority(), theDataset.getDoiSeparator()));
-                    doiRetString = ctxt.doiEZId().createIdentifier(theDataset);
-                    if (!doiRetString.contains(theDataset.getIdentifier())) {
-                        // didn't register new identifier
+        PersistentIdRegistrationServiceBean persistentIdRegistrationServiceBean = PersistentIdRegistrationServiceBean.getBean(ctxt);
+        boolean registerWhenPublished = persistentIdRegistrationServiceBean.registerWhenPublished();
+        logger.log(Level.FINE,"doiProvider={0} protocol={1} GlobalIdCreateTime=={2}", new Object[]{doiProvider, theDataset.getProtocol(), theDataset.getGlobalIdCreateTime()});
+        if (!registerWhenPublished && theDataset.getGlobalIdCreateTime() == null) {
+            String doiRetString = null;
+            try {
+                logger.fine("creating identifier");
+                doiRetString = persistentIdRegistrationServiceBean.createIdentifier(theDataset);
+                if (doiRetString.contains(theDataset.getIdentifier())) {
+                    logger.fine("created: " + doiRetString);
+                    theDataset.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
+                } else {
+                    //try again if identifier exists
+                    if (doiRetString.contains("identifier already exists")) {
+                        theDataset.setIdentifier(ctxt.datasets().generateIdentifierSequence(theDataset.getProtocol(), theDataset.getAuthority(), theDataset.getDoiSeparator()));
+                        logger.fine("creating identifier again because it exists: " + doiRetString);
+                        doiRetString = persistentIdRegistrationServiceBean.createIdentifier(theDataset);
+                        logger.fine("new value: " + doiRetString);
+                        if (!doiRetString.contains(theDataset.getIdentifier())) {
+                            // didn't register new identifier
+                        } else {
+                            theDataset.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
+                        }
                     } else {
-                        theDataset.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
+                        //some reason other than duplicate identifier so don't try again
                     }
-                } else {
-                    //some reason other that duplicate identifier so don't try again
-                    //EZID down possibly
                 }
+            } catch (Throwable e) {
+                // EZID probably down
             }
         }
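UpdateDatasetCommand now wraps the whole registration attempt in catch (Throwable) so that a registry outage cannot block a metadata save: GlobalIdCreateTime simply stays null and publish will try again. A narrower variant of the same safeguard, assuming the bean's createIdentifier throws a checked Exception as at the other call sites in this diff (sketch only, not the patch's code):

    try {
        String ret = persistentIdRegistrationServiceBean.createIdentifier(theDataset);
        if (ret != null && ret.contains(theDataset.getIdentifier())) {
            theDataset.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
        }
        // on any other response, leave GlobalIdCreateTime null so the
        // publish command retries registration later
    } catch (Exception e) {
        // registry unreachable (e.g. EZID down): log rather than swallow silently
        logger.log(Level.WARNING, "identifier registration failed; will retry at publish", e);
    }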
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetTargetURLCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetTargetURLCommand.java
index e2e6ffa64b9..3cbde44d257 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetTargetURLCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetTargetURLCommand.java
@@ -6,22 +6,20 @@
 package edu.harvard.iq.dataverse.engine.command.impl;
 
 import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DvObject;
+import edu.harvard.iq.dataverse.PersistentIdRegistrationServiceBean;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+
 import java.sql.Timestamp;
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.ResourceBundle;
 
 /**
  *
@@ -45,49 +43,20 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
                     this, Collections.singleton(Permission.EditDataset), target);
         }
 
-        if (target.getProtocol().equals("doi")) {
-            String nonNullDefaultIfKeyNotFound = "";
-            String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
-            if (doiProvider.equals("EZID")) {
-                HashMap metadata = ctxt.doiEZId().getMetadataFromDatasetForTargetURL(target);
-                String doiRetString = ctxt.doiEZId().modifyIdentifier(target, metadata);
-                if (doiRetString != null && doiRetString.contains(target.getIdentifier())) {
-                    target.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
-                    ctxt.em().merge(target);
-                    ctxt.em().flush();
-                } else {
-                    //do nothing - we'll know it failed because the global id create time won't have been updated.
-                }
-            }
-            if (doiProvider.equals("DataCite")) {
-                HashMap metadata = ctxt.doiDataCite().getMetadataFromDatasetForTargetURL(target);
-                try {
-                    String doiRetString = ctxt.doiDataCite().modifyIdentifier(target, metadata);
-                    if (doiRetString != null && doiRetString.contains(target.getIdentifier())) {
-                        target.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
-                        ctxt.em().merge(target);
-                        ctxt.em().flush();
-                    } else {
-                        //do nothing - we'll know it failed because the global id create time won't have been updated.
-                    }
-                } catch (Exception e) {
-                    //do nothing - we'll know it failed because the global id create time won't have been updated.
-                }
+        PersistentIdRegistrationServiceBean persistentIdRegistrationServiceBean = PersistentIdRegistrationServiceBean.getBean(target.getProtocol(), ctxt);
+        HashMap metadata = persistentIdRegistrationServiceBean.getMetadataFromDatasetForTargetURL(target);
+        try {
+            String doiRetString = persistentIdRegistrationServiceBean.modifyIdentifier(target, metadata);
+            if (doiRetString != null && doiRetString.contains(target.getIdentifier())) {
+                target.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
+                ctxt.em().merge(target);
+                ctxt.em().flush();
+            } else {
+                //do nothing - we'll know it failed because the global id create time won't have been updated.
+            }
-
-        } else if ("hdl".equals(target.getProtocol())) {
-            // TODO:
-            // handlenet registration still needs diagnostics!
-            // -- L.A. 4.0
-            ctxt.handleNet().reRegisterHandle(target);
-            target.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
-            ctxt.em().merge(target);
-            ctxt.em().flush();
-        } else {
-            // TODO why not throw an IllegalCommandException?
-            throw new UnsupportedOperationException("UpdateDatasetTargetURLCommand only supported for doi protocol."); //To change body of generated methods, choose Tools | Templates.
+        } catch (Exception e) {
+            //do nothing - as above, the problem has been logged and the unchanged global id create time will show that the update failed.
         }
-        }
     }
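With the hdl-specific branch and the per-provider DOI branches gone, the target-URL update reduces to the same three steps for every protocol. Usage, as the new UpdateDatasetTargetURLCommand performs it; success remains observable only through GlobalIdCreateTime, as the comments above note:

    PersistentIdRegistrationServiceBean bean =
            PersistentIdRegistrationServiceBean.getBean(target.getProtocol(), ctxt);
    // the metadata map carries the "_target" URL for the dataset landing page
    HashMap metadata = bean.getMetadataFromDatasetForTargetURL(target);
    String ret = bean.modifyIdentifier(target, metadata);
    if (ret != null && ret.contains(target.getIdentifier())) {
        // only a provider echo of the identifier counts as success
        target.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
        ctxt.em().merge(target);
        ctxt.em().flush();
    }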