diff --git a/doc/sphinx-guides/source/admin/metadataexport.rst b/doc/sphinx-guides/source/admin/metadataexport.rst
index 42c8eb1b6f0..a8931b87716 100644
--- a/doc/sphinx-guides/source/admin/metadataexport.rst
+++ b/doc/sphinx-guides/source/admin/metadataexport.rst
@@ -15,9 +15,9 @@ Batch exports through the API
 
 In addition to the automated exports, a Dataverse admin can start a batch job through the API. The following 2 API calls are provided:
 
-/api/datasets/exportAll?key=...
+/api/admin/metadata/exportAll
 
-/api/datasets/reExportAll?key=...
+/api/admin/metadata/reExportAll
 
 The former will attempt to export all the published, local (non-harvested) datasets that haven't been exported yet. The latter will *force* a re-export of every published, local dataset, regardless of whether it has already been exported or not.
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 2342d447949..f0af8490c3d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -221,6 +221,12 @@ public Response createDataset(String jsonBody) {
         return okResponse(jsonbuilder.add("latestVersion", json(importedDataset.getLatestVersion())));
     }
 */
+    // TODO:
+    // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand
+    // to obtain the dataset that we are trying to export - which would handle
+    // Auth in the process... For now, Auth isn't necessary - since export ONLY
+    // WORKS on published datasets, which are open to the world. -- L.A. 4.5
+    
     @GET
     @Path("/export")
     @Produces({"application/xml", "application/json"})
@@ -260,32 +266,6 @@ public Response exportDataset(@QueryParam("persistentId") String persistentId, @
         }
     }
     
-    // The following 2 commands start export all jobs in the background,
-    // asynchronously.
-    // (These API calls should probably not be here;
-    // May be under "/admin" somewhere?)
-    // exportAll will attempt to go through all the published, local
-    // datasets *that haven't been exported yet* - which is determined by
-    // checking the lastexporttime value of the dataset; if it's null, or < the last
-    // publication date = "unexported" - and export them.
-    @GET
-    @Path("/exportAll")
-    @Produces("application/json")
-    public Response exportAll() {
-        datasetService.exportAllAsync();
-        return this.accepted();
-    }
-    
-    // reExportAll will FORCE A FULL REEXPORT on every published, local
-    // dataset, regardless of the lastexporttime value.
-    @GET
-    @Path("/reExportAll")
-    @Produces("application/json")
-    public Response reExportAll() {
-        datasetService.reExportAllAsync();
-        return this.accepted();
-    }
-    
     @DELETE
     @Path("{id}")
     public Response deleteDataset( @PathParam("id") String id) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java b/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java
new file mode 100644
index 00000000000..b77954bf1a5
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java
@@ -0,0 +1,54 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.api;
+
+import edu.harvard.iq.dataverse.DatasetServiceBean;
+import java.util.logging.Logger;
+import javax.ejb.EJB;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Response;
+
+/**
+ *
+ * @author Leonid Andreev
+ *
+ */
+
+@Path("admin/metadata")
+public class Metadata extends AbstractApiBean {
+    private static final Logger logger = Logger.getLogger(Metadata.class.getName());
+
+    @EJB
+    DatasetServiceBean datasetService;
+
+    // The following 2 commands start export all jobs in the background,
+    // asynchronously.
+    // (These API calls should probably not be here;
+    // May be under "/admin" somewhere?)
+    // exportAll will attempt to go through all the published, local
+    // datasets *that haven't been exported yet* - which is determined by
+    // checking the lastexporttime value of the dataset; if it's null, or < the last
+    // publication date = "unexported" - and export them.
+    @GET
+    @Path("/exportAll")
+    @Produces("application/json")
+    public Response exportAll() {
+        datasetService.exportAllAsync();
+        return this.accepted();
+    }
+
+    // reExportAll will FORCE A FULL REEXPORT on every published, local
+    // dataset, regardless of the lastexporttime value.
+    @GET
+    @Path("/reExportAll")
+    @Produces("application/json")
+    public Response reExportAll() {
+        datasetService.reExportAllAsync();
+        return this.accepted();
+    }
+}