Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 8 additions & 1 deletion doc/sphinx-guides/source/installation/config.rst
Original file line number Diff line number Diff line change
Expand Up @@ -409,6 +409,13 @@ Specify a URL where users can read your API Terms of Use.

``curl -X PUT -d http://best-practices.dataverse.org/harvard-policies/harvard-api-tou.html http://localhost:8080/api/admin/settings/:ApiTermsOfUse``

:ExcludeEmailFromExport
+++++++++++++++++++++++

Set ``:ExcludeEmailFromExport`` to prevent email addresses for dataset contacts from being exposed in XML or JSON representations of dataset metadata. For a list of exported formats, such as DDI, see the :doc:`/admin/metadataexport` section of the Admin Guide.

``curl -X PUT -d true http://localhost:8080/api/admin/settings/:ExcludeEmailFromExport``

:GuidesBaseUrl
++++++++++++++

Expand Down Expand Up @@ -607,7 +614,7 @@ The default checksum algorithm used is MD5 and should be sufficient for establis
:ShibPassiveLoginEnabled
++++++++++++++++++++++++

Set ``ShibPassiveLoginEnabled`` to true to enable passive login for Shibboleth. When this feature is enabled, an additional Javascript file (isPassive.js) will be loaded for every page. It will generate a passive login request to your Shibboleth SP when an anonymous user navigates to the site. A cookie named "_check_is_passive_dv" will be created to keep track of whether or not a passive login request has already been made for the user.
Set ``:ShibPassiveLoginEnabled`` to true to enable passive login for Shibboleth. When this feature is enabled, an additional Javascript file (isPassive.js) will be loaded for every page. It will generate a passive login request to your Shibboleth SP when an anonymous user navigates to the site. A cookie named "_check_is_passive_dv" will be created to keep track of whether or not a passive login request has already been made for the user.

This implementation follows the example on the Shibboleth wiki documentation page for the isPassive feature: https://wiki.shibboleth.net/confluence/display/SHIB2/isPassive

Expand Down
26 changes: 2 additions & 24 deletions src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
Original file line number Diff line number Diff line change
Expand Up @@ -339,10 +339,6 @@ public boolean isUnlimitedUploadFileSize(){
return false;
}

public boolean isMetadataExportEnabled() {
return metadataExportEnabled;
}

public String getDataverseSiteUrl() {
return this.dataverseSiteUrl;
}
Expand Down Expand Up @@ -1003,7 +999,6 @@ public void setDisplayFileMetadata(List<FileMetadata> displayFileMetadata) {
}

private boolean readOnly = true;
private boolean metadataExportEnabled;
private String originalSourceUrl = null;

public String getOriginalSourceUrl() {
Expand All @@ -1027,11 +1022,6 @@ private String init(boolean initFull) {

this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSize();
setDataverseSiteUrl(systemConfig.getDataverseSiteUrl());
/**
* For now having DDI export enabled is a proxy for having any metadata
* export enabled (JSON, Dublin Core, etc.).
*/
metadataExportEnabled = systemConfig.isDdiExportEnabled();

guestbookResponse = new GuestbookResponse();

Expand Down Expand Up @@ -2586,28 +2576,16 @@ public String getTabularDataFileURL(Long fileid) {
return dataURL;
}

public String getMetadataAsJsonUrl() {
if (dataset != null) {
Long datasetId = dataset.getId();
if (datasetId != null) {
String myHostURL = getDataverseSiteUrl();
String metadataAsJsonUrl = myHostURL + "/api/datasets/" + datasetId;
return metadataAsJsonUrl;
}
}
return null;
}

public List< String[]> getExporters(){
List<String[]> retList = new ArrayList();
String myHostURL = getDataverseSiteUrl();
for (String [] provider : ExportService.getInstance().getExportersLabels() ){
for (String [] provider : ExportService.getInstance(settingsService).getExportersLabels() ){
String formatName = provider[1];
String formatDisplayName = provider[0];

Exporter exporter = null;
try {
exporter = ExportService.getInstance().getExporter(formatName);
exporter = ExportService.getInstance(settingsService).getExporter(formatName);
} catch (ExportException ex) {
exporter = null;
}
Expand Down
4 changes: 2 additions & 2 deletions src/main/java/edu/harvard/iq/dataverse/FilePage.java
Original file line number Diff line number Diff line change
Expand Up @@ -200,13 +200,13 @@ public void setVersion(String version) {
public List< String[]> getExporters(){
List<String[]> retList = new ArrayList();
String myHostURL = systemConfig.getDataverseSiteUrl();
for (String [] provider : ExportService.getInstance().getExportersLabels() ){
for (String [] provider : ExportService.getInstance(settingsService).getExportersLabels() ){
String formatName = provider[1];
String formatDisplayName = provider[0];

Exporter exporter = null;
try {
exporter = ExportService.getInstance().getExporter(formatName);
exporter = ExportService.getInstance(settingsService).getExporter(formatName);
} catch (ExportException ex) {
exporter = null;
}
Expand Down
53 changes: 2 additions & 51 deletions src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
Original file line number Diff line number Diff line change
Expand Up @@ -167,7 +167,7 @@ public Response exportDataset(@QueryParam("persistentId") String persistentId, @
return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
}

ExportService instance = ExportService.getInstance();
ExportService instance = ExportService.getInstance(settingsSvc);

String xml = instance.getExportAsString(dataset, exporter);
// I'm wondering if this going to become a performance problem
Expand Down Expand Up @@ -290,7 +290,7 @@ public Response getVersionMetadataBlock( @PathParam("id") String datasetId,
Map<MetadataBlock, List<DatasetField>> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields());
for ( Map.Entry<MetadataBlock, List<DatasetField>> p : fieldsByBlock.entrySet() ) {
if ( p.getKey().getName().equals(blockName) ) {
return ok( json(p.getKey(), p.getValue()) );
return ok(json(p.getKey(), p.getValue()));
}
}
return notFound("metadata block named " + blockName + " not found");
Expand Down Expand Up @@ -441,55 +441,6 @@ public Response getLinks(@PathParam("id") String idSupplied ) {
}
}

/**
* @todo Implement this for real as part of
* https://github.com/IQSS/dataverse/issues/2579
*/
@GET
@Path("ddi")
@Produces({"application/xml", "application/json"})
@Deprecated
public Response getDdi(@QueryParam("id") long id, @QueryParam("persistentId") String persistentId, @QueryParam("dto") boolean dto) {
boolean ddiExportEnabled = systemConfig.isDdiExportEnabled();
if (!ddiExportEnabled) {
return error(Response.Status.FORBIDDEN, "Disabled");
}
try {
User u = findUserOrDie();
if (!u.isSuperuser()) {
return error(Response.Status.FORBIDDEN, "Not a superuser");
}

logger.fine("looking up " + persistentId);
Dataset dataset = datasetService.findByGlobalId(persistentId);
if (dataset == null) {
return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
}

String xml = "<codeBook>XML_BEING_COOKED</codeBook>";
if (dto) {
/**
* @todo We can only assume that this should not be hard-coded
* to getLatestVersion
*/
final JsonObjectBuilder datasetAsJson = jsonAsDatasetDto(dataset.getLatestVersion());
xml = DdiExportUtil.datasetDtoAsJson2ddi(datasetAsJson.toString());
} else {
OutputStream outputStream = new ByteArrayOutputStream();
ddiExportService.exportDataset(dataset.getId(), outputStream, null, null);
xml = outputStream.toString();
}
logger.fine("xml to return: " + xml);

return Response.ok()
.entity(xml)
.type(MediaType.APPLICATION_XML).
build();
} catch (WrappedResponse wr) {
return wr.getResponse();
}
}

/**
* @todo Make this real. Currently only used for API testing. Copied from
* the equivalent API endpoint for dataverses and simplified with values
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
// if there is still another released version of this dataset,
// we want to re-export it :

ExportService instance = ExportService.getInstance();
ExportService instance = ExportService.getInstance(ctxt.settings());

if (managed.getDataset().getReleasedVersion() != null) {
try {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -226,7 +226,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
*/

try {
ExportService instance = ExportService.getInstance();
ExportService instance = ExportService.getInstance(ctxt.settings());
instance.exportAllFormats(theDataset);

} catch (ExportException ex) {
Expand Down
24 changes: 16 additions & 8 deletions src/main/java/edu/harvard/iq/dataverse/export/ExportService.java
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,14 @@
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.export.spi.Exporter;
import edu.harvard.iq.dataverse.util.SystemConfig;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.jsonAsDatasetDto;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.json.JsonPrinter;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
Expand All @@ -23,9 +22,6 @@
import java.util.List;
import java.util.ServiceConfigurationError;
import java.util.ServiceLoader;
import javax.ejb.EJB;
import javax.ejb.TransactionAttribute;
import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;

Expand All @@ -37,12 +33,22 @@ public class ExportService {

private static ExportService service;
private ServiceLoader<Exporter> loader;
static SettingsServiceBean settingsService;

private ExportService() {
loader = ServiceLoader.load(Exporter.class);
}

/**
* TODO: Audit all calls to this getInstance method that doesn't take a
* SettingsServiceBean as an argument to make sure nothing broke.
*/
public static synchronized ExportService getInstance() {
return getInstance(null);
}

public static synchronized ExportService getInstance(SettingsServiceBean settingsService) {
ExportService.settingsService = settingsService;
if (service == null) {
service = new ExportService();
} else{
Expand Down Expand Up @@ -127,7 +133,8 @@ public void exportAllFormats (Dataset dataset) throws ExportException {
if (releasedVersion == null) {
throw new ExportException("No released version for dataset "+dataset.getGlobalId());
}
final JsonObjectBuilder datasetAsJsonBuilder = jsonAsDatasetDto(releasedVersion);
JsonPrinter jsonPrinter = new JsonPrinter(settingsService);
final JsonObjectBuilder datasetAsJsonBuilder = jsonPrinter.jsonAsDatasetDto(releasedVersion);
JsonObject datasetAsJson = datasetAsJsonBuilder.build();

Iterator<Exporter> exporters = loader.iterator();
Expand Down Expand Up @@ -175,7 +182,8 @@ public void exportFormat(Dataset dataset, String formatName) throws ExportExcept
if (releasedVersion == null) {
throw new IllegalStateException("No Released Version");
}
final JsonObjectBuilder datasetAsJsonBuilder = jsonAsDatasetDto(releasedVersion);
JsonPrinter jsonPrinter = new JsonPrinter(settingsService);
final JsonObjectBuilder datasetAsJsonBuilder = jsonPrinter.jsonAsDatasetDto(releasedVersion);
cacheExport(releasedVersion, formatName, datasetAsJsonBuilder.build(), e);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,13 @@
import com.google.gson.Gson;
import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.DataTable;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetFieldConstant;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.FileMetadata;
import edu.harvard.iq.dataverse.api.dto.DataVariableDTO;
import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
import edu.harvard.iq.dataverse.api.dto.FieldDTO;
import edu.harvard.iq.dataverse.api.dto.FileDTO;
import edu.harvard.iq.dataverse.api.dto.FileMetadataDTO;
import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
import edu.harvard.iq.dataverse.datavariable.DataVariable;
import edu.harvard.iq.dataverse.datavariable.SummaryStatistic;
Expand All @@ -29,13 +26,11 @@
import edu.harvard.iq.dataverse.util.xml.XmlPrinter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
Expand Down Expand Up @@ -586,9 +581,10 @@ private static void writeContactsElement(XMLStreamWriter xmlw, DatasetVersionDTO
datasetContactAffiliation = next.getSinglePrimitive();
}
if (DatasetFieldConstant.datasetContactEmail.equals(next.getTypeName())) {
datasetContactEmail = next.getSinglePrimitive();
datasetContactEmail = next.getSinglePrimitive();
}
}
// TODO: Since datasetContactEmail is a required field but datasetContactName is not, consider dropping the check for an empty datasetContactName so that datasetContactEmail can still be written out.
if (!datasetContactName.isEmpty()){
xmlw.writeStartElement("contact");
if(!datasetContactAffiliation.isEmpty()){
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -129,11 +129,11 @@ private Context createContext() {
}

private void addSupportedMetadataFormats(Context context) {
for (String[] provider : ExportService.getInstance().getExportersLabels()) {
for (String[] provider : ExportService.getInstance(settingsService).getExportersLabels()) {
String formatName = provider[1];
Exporter exporter;
try {
exporter = ExportService.getInstance().getExporter(formatName);
exporter = ExportService.getInstance(settingsService).getExporter(formatName);
} catch (ExportException ex) {
exporter = null;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -128,10 +128,6 @@ public enum Key {
SolrHostColonPort,
/** Key for limiting the number of bytes uploaded via the Data Deposit API, UI (web site and . */
MaxFileUploadSizeInBytes,
/**
* Experimental: Key for if DDI export is enabled or disabled.
*/
DdiExportEnabled,
/** Key for if ScrubMigrationData is enabled or disabled. */
ScrubMigrationData,
/** Key for the url to send users who want to sign up to. */
Expand Down Expand Up @@ -241,7 +237,11 @@ Whether Harvesting (OAI) service is enabled
/**
* Whether Shibboleth passive authentication mode is enabled
*/
ShibPassiveLoginEnabled;
ShibPassiveLoginEnabled,
/**
* Whether Export should exclude FieldType.EMAIL
*/
ExcludeEmailFromExport;

@Override
public String toString() {
Expand Down
Loading