Merge 019f788 into 27d3767

GPortas committed Apr 23, 2024
2 parents 27d3767 + 019f788 commit a20eef4
Showing 11 changed files with 357 additions and 138 deletions.
@@ -0,0 +1,3 @@
Changed ``api/dataverses/{id}/metadatablocks`` so that setting the query parameter ``onlyDisplayedOnCreate=true`` also returns metadata blocks with dataset field type input levels configured as required on the General Information page of the collection, in addition to the metadata blocks and fields with the property ``displayOnCreate=true`` (the original behavior).

A new endpoint ``api/dataverses/{id}/inputLevels`` has been created for updating the dataset field type input levels of a collection via API.
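
For example (a sketch, assuming the demo server and a placeholder API token as used elsewhere in the guides), the following call lists the metadata blocks displayed on dataset creation, which now include blocks whose fields are marked as required through input levels::

  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablocks?onlyDisplayedOnCreate=true"
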
41 changes: 40 additions & 1 deletion doc/sphinx-guides/source/api/native-api.rst
@@ -898,7 +898,46 @@ The following attributes are supported:
* ``filePIDsEnabled`` ("true" or "false") Restricted to use by superusers and only when the :ref:`:AllowEnablingFilePIDsPerCollection <:AllowEnablingFilePIDsPerCollection>` setting is true. Enables or disables registration of file-level PIDs in datasets within the collection (overriding the instance-wide setting).

Update Collection Input Levels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Updates the dataset field type input levels in a collection.

Please note that this endpoint overwrites all the input levels of the collection, so if you want to keep the existing ones, you must include them in the JSON request body.

If one of the input levels corresponds to a dataset field type belonging to a metadata block that does not exist in the collection, the metadata block will be added to the collection.

This endpoint expects a JSON array with the following format::

  [
    {
      "datasetFieldTypeName": "datasetFieldTypeName1",
      "required": true,
      "include": true
    },
    {
      "datasetFieldTypeName": "datasetFieldTypeName2",
      "required": true,
      "include": true
    }
  ]

.. code-block:: bash

  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
  export SERVER_URL=https://demo.dataverse.org
  export ID=root
  export JSON='[{"datasetFieldTypeName":"geographicCoverage", "required":true, "include":true}, {"datasetFieldTypeName":"country", "required":true, "include":true}]'

  curl -X PUT -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type:application/json" "$SERVER_URL/api/dataverses/$ID/inputLevels" -d "$JSON"

The fully expanded example above (without environment variables) looks like this:

.. code-block:: bash

  curl -X PUT -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Content-Type:application/json" "https://demo.dataverse.org/api/dataverses/root/inputLevels" -d '[{"datasetFieldTypeName":"geographicCoverage", "required":true, "include":false}, {"datasetFieldTypeName":"country", "required":true, "include":false}]'

.. _collection-storage-quotas:

Collection Storage Quotas
~~~~~~~~~~~~~~~~~~~~~~~~~

8 changes: 8 additions & 0 deletions src/main/java/edu/harvard/iq/dataverse/Dataverse.java
@@ -411,6 +411,14 @@ public List<DataverseFieldTypeInputLevel> getDataverseFieldTypeInputLevels() {
        return dataverseFieldTypeInputLevels;
    }

    public boolean isDatasetFieldTypeRequiredAsInputLevel(Long datasetFieldTypeId) {
        for (DataverseFieldTypeInputLevel dataverseFieldTypeInputLevel : dataverseFieldTypeInputLevels) {
            if (dataverseFieldTypeInputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId) && dataverseFieldTypeInputLevel.isRequired()) {
                return true;
            }
        }
        return false;
    }

    public Template getDefaultTemplate() {
        return defaultTemplate;
@@ -58,10 +58,18 @@ public List<MetadataBlock> listMetadataBlocksDisplayedOnCreate(Dataverse ownerDataverse

if (ownerDataverse != null) {
Root<Dataverse> dataverseRoot = criteriaQuery.from(Dataverse.class);
Join<Dataverse, DataverseFieldTypeInputLevel> datasetFieldTypeInputLevelJoin = dataverseRoot.join("dataverseFieldTypeInputLevels", JoinType.LEFT);

Predicate requiredPredicate = criteriaBuilder.and(
datasetFieldTypeInputLevelJoin.get("datasetFieldType").in(metadataBlockRoot.get("datasetFieldTypes")),
criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("required")));

Predicate unionPredicate = criteriaBuilder.or(displayOnCreatePredicate, requiredPredicate);

criteriaQuery.where(criteriaBuilder.and(
criteriaBuilder.equal(dataverseRoot.get("id"), ownerDataverse.getId()),
metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")),
displayOnCreatePredicate
unionPredicate
));
} else {
criteriaQuery.where(displayOnCreatePredicate);
146 changes: 57 additions & 89 deletions src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -1,27 +1,10 @@
package edu.harvard.iq.dataverse.api;

import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetFieldType;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseFacet;
import edu.harvard.iq.dataverse.DataverseContact;
import edu.harvard.iq.dataverse.DataverseFeaturedDataverse;
import edu.harvard.iq.dataverse.DataverseLinkingServiceBean;
import edu.harvard.iq.dataverse.DataverseMetadataBlockFacet;
import edu.harvard.iq.dataverse.DataverseServiceBean;
import edu.harvard.iq.dataverse.*;
import edu.harvard.iq.dataverse.api.auth.AuthRequired;
import edu.harvard.iq.dataverse.api.datadeposit.SwordServiceBean;
import edu.harvard.iq.dataverse.api.dto.DataverseMetadataBlockFacetDTO;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
import edu.harvard.iq.dataverse.DvObject;
import edu.harvard.iq.dataverse.FeaturedDataverseServiceBean;
import edu.harvard.iq.dataverse.GlobalId;
import edu.harvard.iq.dataverse.GuestbookResponseServiceBean;
import edu.harvard.iq.dataverse.GuestbookServiceBean;
import edu.harvard.iq.dataverse.MetadataBlock;
import edu.harvard.iq.dataverse.RoleAssignment;

import edu.harvard.iq.dataverse.api.dto.ExplicitGroupDTO;
import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
@@ -37,46 +20,7 @@
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.dataverse.DataverseUtil;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.impl.AddRoleAssigneesToExplicitGroupCommand;
import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreateExplicitGroupCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreateRoleCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteCollectionQuotaCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseLinkingDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteExplicitGroupCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetSchemaCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetCollectionQuotaCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetCollectionStorageUseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateMetadataBlockFacetRootCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetDataverseStorageSizeCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetExplicitGroupCommand;
import edu.harvard.iq.dataverse.engine.command.impl.ImportDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.LinkDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.ListDataverseContentCommand;
import edu.harvard.iq.dataverse.engine.command.impl.ListExplicitGroupsCommand;
import edu.harvard.iq.dataverse.engine.command.impl.ListFacetsCommand;
import edu.harvard.iq.dataverse.engine.command.impl.ListFeaturedCollectionsCommand;
import edu.harvard.iq.dataverse.engine.command.impl.ListMetadataBlockFacetsCommand;
import edu.harvard.iq.dataverse.engine.command.impl.ListMetadataBlocksCommand;
import edu.harvard.iq.dataverse.engine.command.impl.ListRoleAssignments;
import edu.harvard.iq.dataverse.engine.command.impl.ListRolesCommand;
import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetResult;
import edu.harvard.iq.dataverse.engine.command.impl.MoveDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.RemoveRoleAssigneesFromExplicitGroupCommand;
import edu.harvard.iq.dataverse.engine.command.impl.RevokeRoleCommand;
import edu.harvard.iq.dataverse.engine.command.impl.SetCollectionQuotaCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseDefaultContributorRoleCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseMetadataBlocksCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateExplicitGroupCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateMetadataBlockFacetsCommand;
import edu.harvard.iq.dataverse.engine.command.impl.ValidateDatasetJsonCommand;
import edu.harvard.iq.dataverse.engine.command.impl.*;
import edu.harvard.iq.dataverse.pidproviders.PidProvider;
import edu.harvard.iq.dataverse.pidproviders.PidUtil;
import edu.harvard.iq.dataverse.settings.JvmSettings;
@@ -91,23 +35,14 @@
import edu.harvard.iq.dataverse.util.json.JsonPrinter;
import edu.harvard.iq.dataverse.util.json.JsonUtil;

import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.TreeSet;
import java.io.StringReader;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import jakarta.ejb.EJB;
import jakarta.ejb.EJBException;
import jakarta.ejb.Stateless;
import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonNumber;
import jakarta.json.JsonObject;
import jakarta.json.JsonObjectBuilder;
import jakarta.json.JsonString;
import jakarta.json.JsonValue;
import jakarta.json.*;
import jakarta.json.JsonValue.ValueType;
import jakarta.json.stream.JsonParsingException;
import jakarta.validation.ConstraintViolationException;
@@ -131,16 +66,11 @@
import java.io.OutputStream;
import java.text.MessageFormat;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.ws.rs.WebApplicationException;
import jakarta.ws.rs.core.Context;
import jakarta.ws.rs.core.StreamingOutput;
import java.util.ArrayList;
import javax.xml.stream.XMLStreamException;

/**
@@ -172,10 +102,10 @@ public class Dataverses extends AbstractApiBean {

@EJB
DataverseServiceBean dataverseService;

@EJB
DataverseLinkingServiceBean linkingService;

@EJB
FeaturedDataverseServiceBean featuredDataverseService;

@@ -707,6 +637,43 @@ public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam
        }
    }

    @PUT
    @AuthRequired
    @Path("{identifier}/inputLevels")
    public Response updateInputLevels(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier, String jsonBody) {
        try {
            Dataverse dataverse = findDataverseOrDie(identifier);
            List<DataverseFieldTypeInputLevel> newInputLevels = parseInputLevels(jsonBody, dataverse);
            execCommand(new UpdateDataverseInputLevelsCommand(dataverse, createDataverseRequest(getRequestUser(crc)), newInputLevels));
            return ok(BundleUtil.getStringFromBundle("dataverse.update.success"), JsonPrinter.json(dataverse));
        } catch (WrappedResponse e) {
            return e.getResponse();
        }
    }

    private List<DataverseFieldTypeInputLevel> parseInputLevels(String jsonBody, Dataverse dataverse) throws WrappedResponse {
        JsonArray inputLevelsArray = Json.createReader(new StringReader(jsonBody)).readArray();

        List<DataverseFieldTypeInputLevel> newInputLevels = new ArrayList<>();
        for (JsonValue value : inputLevelsArray) {
            JsonObject inputLevel = (JsonObject) value;
            String datasetFieldTypeName = inputLevel.getString("datasetFieldTypeName");
            DatasetFieldType datasetFieldType = datasetFieldSvc.findByName(datasetFieldTypeName);

            if (datasetFieldType == null) {
                String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.updateinputlevels.error.invalidfieldtypename"), datasetFieldTypeName);
                throw new WrappedResponse(badRequest(errorMessage));
            }

            boolean required = inputLevel.getBoolean("required");
            boolean include = inputLevel.getBoolean("include");

            newInputLevels.add(new DataverseFieldTypeInputLevel(datasetFieldType, dataverse, required, include));
        }

        return newInputLevels;
    }
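
A quick illustration of the validation above (a sketch, not part of the diff): a request that references an unknown ``datasetFieldTypeName`` — such as the hypothetical ``noSuchField`` below — should receive a 400 Bad Request built from the ``dataverse.updateinputlevels.error.invalidfieldtypename`` bundle message::

  curl -X PUT -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Content-Type:application/json" "https://demo.dataverse.org/api/dataverses/root/inputLevels" -d '[{"datasetFieldTypeName":"noSuchField", "required":true, "include":true}]'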

    @DELETE
    @AuthRequired
    @Path("{linkingDataverseId}/deleteLink/{linkedDataverseId}")
@@ -726,14 +693,15 @@ public Response listMetadataBlocks(@Context ContainerRequestContext crc,
@QueryParam("onlyDisplayedOnCreate") boolean onlyDisplayedOnCreate,
@QueryParam("returnDatasetFieldTypes") boolean returnDatasetFieldTypes) {
try {
Dataverse dataverse = findDataverseOrDie(dvIdtf);
final List<MetadataBlock> metadataBlocks = execCommand(
new ListMetadataBlocksCommand(
createDataverseRequest(getRequestUser(crc)),
findDataverseOrDie(dvIdtf),
dataverse,
onlyDisplayedOnCreate
)
);
return ok(json(metadataBlocks, returnDatasetFieldTypes, onlyDisplayedOnCreate));
return ok(json(metadataBlocks, returnDatasetFieldTypes, onlyDisplayedOnCreate, dataverse));
} catch (WrappedResponse we) {
return we.getResponse();
}
@@ -836,8 +804,8 @@ public Response listFacets(@Context ContainerRequestContext crc, @PathParam("ide
return e.getResponse();
}
}


@GET
@AuthRequired
@Path("{identifier}/featured")
@@ -860,19 +828,19 @@ public Response getFeaturedDataverses(@Context ContainerRequestContext crc, @Pat
return e.getResponse();
}
}


@POST
@AuthRequired
@Path("{identifier}/featured")
/**
* Allows user to set featured dataverses - must have edit dataverse permission
*
*
*/
public Response setFeaturedDataverses(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String dvAliases) {
List<Dataverse> dvsFromInput = new LinkedList<>();


try {

for (JsonString dvAlias : Util.asJsonArray(dvAliases).getValuesAs(JsonString.class)) {
@@ -886,7 +854,7 @@ public Response setFeaturedDataverses(@Context ContainerRequestContext crc, @Pat
if (dvsFromInput.isEmpty()) {
return error(Response.Status.BAD_REQUEST, "Please provide a valid Json array of dataverse collection aliases to be featured.");
}

Dataverse dataverse = findDataverseOrDie(dvIdtf);
List<Dataverse> featuredSource = new ArrayList<>();
List<Dataverse> featuredTarget = new ArrayList<>();
@@ -919,15 +887,15 @@ public Response setFeaturedDataverses(@Context ContainerRequestContext crc, @Pat
// by passing null for Facets and DataverseFieldTypeInputLevel, those are not changed
execCommand(new UpdateDataverseCommand(dataverse, null, featuredTarget, createDataverseRequest(getRequestUser(crc)), null));
return ok("Featured Dataverses of dataverse " + dvIdtf + " updated.");

} catch (WrappedResponse ex) {
return ex.getResponse();
} catch (JsonParsingException jpe){
return error(Response.Status.BAD_REQUEST, "Please provide a valid Json array of dataverse collection aliases to be featured.");
}

}

@DELETE
@AuthRequired
@Path("{identifier}/featured")
@@ -18,7 +18,6 @@
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import jakarta.persistence.TypedQuery;

/**
* Update an existing dataverse.
@@ -30,10 +29,10 @@ public class UpdateDataverseCommand extends AbstractCommand<Dataverse> {

private final Dataverse editedDv;
private final List<DatasetFieldType> facetList;
private final List<Dataverse> featuredDataverseList;
private final List<DataverseFieldTypeInputLevel> inputLevelList;
private boolean datasetsReindexRequired = false;
private final List<Dataverse> featuredDataverseList;
private final List<DataverseFieldTypeInputLevel> inputLevelList;

private boolean datasetsReindexRequired = false;

public UpdateDataverseCommand(Dataverse editedDv, List<DatasetFieldType> facetList, List<Dataverse> featuredDataverseList,
DataverseRequest aRequest, List<DataverseFieldTypeInputLevel> inputLevelList ) {
