From de627912c50400cd8f2c412c1e453b70d67a22b0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 13 May 2022 14:51:53 -0400 Subject: [PATCH 01/18] Archival status success/pending/failure/null support --- .../harvard/iq/dataverse/DatasetVersion.java | 43 ++++++- .../dataverse/DatasetVersionServiceBean.java | 8 ++ .../harvard/iq/dataverse/api/Datasets.java | 107 ++++++++++++++++++ .../iq/dataverse/util/json/JsonUtil.java | 7 ++ 4 files changed, 164 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index faa91b87e12..d69b1d5ca8e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -6,11 +6,11 @@ import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.license.License; -import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.DateUtil; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.workflows.WorkflowComment; import java.io.Serializable; @@ -27,6 +27,7 @@ import javax.json.Json; import javax.json.JsonArray; import javax.json.JsonArrayBuilder; +import javax.json.JsonObject; import javax.json.JsonObjectBuilder; import javax.persistence.CascadeType; import javax.persistence.Column; @@ -94,6 +95,14 @@ public enum VersionState { public static final int ARCHIVE_NOTE_MAX_LENGTH = 1000; public static final int VERSION_NOTE_MAX_LENGTH = 1000; + //Archival copies: Status message required components + public static final String STATUS = "status"; + public static final String MESSAGE = "message"; + //Archival Copies: Allowed Statuses + public static final String PENDING = "pending"; + public static final String SUCCESS = "success"; + public static final String FAILURE = "failure"; + @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Long id; @@ -180,6 +189,8 @@ public enum VersionState { @Transient private DatasetVersionDifference dvd; + @Transient + private JsonObject archivalStatus; public Long getId() { return this.id; @@ -319,9 +330,39 @@ public void setArchiveNote(String note) { public String getArchivalCopyLocation() { return archivalCopyLocation; } + + public String getArchivalCopyLocationStatus() { + populateArchivalStatus(false); + + if(archivalStatus!=null) { + return archivalStatus.getString(STATUS); + } + return null; + } + public String getArchivalCopyLocationMessage() { + populateArchivalStatus(false); + if(archivalStatus!=null) { + return archivalStatus.getString(MESSAGE); + } + return null; + } + + private void populateArchivalStatus(boolean force) { + if(archivalStatus ==null || force) { + if(archivalCopyLocation!=null) { + try { + archivalStatus = JsonUtil.getJsonObject(archivalCopyLocation); + } catch(Exception e) { + logger.warning("DatasetVersion id: " + id + "has a non-JsonObject value, parsing error: " + e.getMessage()); + logger.info(archivalCopyLocation); + } + } + } + } public void setArchivalCopyLocation(String location) { this.archivalCopyLocation = location; + populateArchivalStatus(true); } public String getDeaccessionLink() { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java 
b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 580d95b4b1d..df787ae1391 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -1187,4 +1187,12 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion) return null; } + /** + * Merges the passed datasetversion to the persistence context. + * @param ver the DatasetVersion whose new state we want to persist. + * @return The managed entity representing {@code ver}. + */ + public DatasetVersion merge( DatasetVersion ver ) { + return em.merge(ver); + } } // end class diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 153d3f266b1..eac4a8f0d44 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -87,6 +87,7 @@ import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; import edu.harvard.iq.dataverse.util.json.JsonParseException; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.search.IndexServiceBean; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; @@ -216,6 +217,9 @@ public class Datasets extends AbstractApiBean { @Inject DataverseRoleServiceBean dataverseRoleService; + @EJB + DatasetVersionServiceBean datasetversionService; + /** * Used to consolidate the way we parse and handle dataset versions. * @param @@ -3282,4 +3286,107 @@ public Response getCurationStates() throws WrappedResponse { csvSB.append("\n"); return ok(csvSB.toString(), MediaType.valueOf(FileUtil.MIME_TYPE_CSV), "datasets.status.csv"); } + + //APIs to manage archival status + + @GET + @Produces(MediaType.APPLICATION_JSON) + @Path("/submitDatasetVersionToArchive/{id}/{version}/status") + public Response getDatasetVersionToArchiveStatus(@PathParam("id") String dsid, + @PathParam("version") String versionNumber) { + + try { + AuthenticatedUser au = findAuthenticatedUserOrDie(); + if (!au.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + Dataset ds = findDatasetOrDie(dsid); + + DatasetVersion dv = datasetversionService.findByFriendlyVersionNumber(ds.getId(), versionNumber); + if (dv.getArchivalCopyLocation() == null) { + return error(Status.NO_CONTENT, "This dataset version has not been archived"); + } else { + JsonObject status = JsonUtil.getJsonObject(dv.getArchivalCopyLocation()); + return ok(status); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + + @PUT + @Consumes(MediaType.APPLICATION_JSON) + @Path("/submitDatasetVersionToArchive/{id}/{version}/status") + public Response setDatasetVersionToArchiveStatus(@PathParam("id") String dsid, + @PathParam("version") String versionNumber, JsonObject update) { + + logger.info(JsonUtil.prettyPrint(update)); + try { + AuthenticatedUser au = findAuthenticatedUserOrDie(); + + if (!au.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + if (update.containsKey(DatasetVersion.STATUS) + && update.containsKey(DatasetVersion.MESSAGE)) { + String status = update.getString(DatasetVersion.STATUS); + if (status.equals(DatasetVersion.PENDING) + || status.equals(DatasetVersion.FAILURE) + 
|| status.equals(DatasetVersion.SUCCESS)) { + + try { + Dataset ds; + + ds = findDatasetOrDie(dsid); + + DatasetVersion dv = datasetversionService.findByFriendlyVersionNumber(ds.getId(), versionNumber); + if(dv==null) { + return error(Status.NOT_FOUND, "Dataset version not found"); + } + + dv.setArchivalCopyLocation(JsonUtil.prettyPrint(update)); + dv = datasetversionService.merge(dv); + logger.info("location now: " + dv.getArchivalCopyLocation()); + logger.info("status now: " + dv.getArchivalCopyLocationStatus()); + logger.info("message now: " + dv.getArchivalCopyLocationMessage()); + + return ok("Status updated"); + + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + } + return error(Status.BAD_REQUEST, "Unacceptable status format"); + } + + @DELETE + @Produces(MediaType.APPLICATION_JSON) + @Path("/submitDatasetVersionToArchive/{id}/{version}/status") + public Response deleteDatasetVersionToArchiveStatus(@PathParam("id") String dsid, + @PathParam("version") String versionNumber) { + + try { + AuthenticatedUser au = findAuthenticatedUserOrDie(); + if (!au.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + Dataset ds = findDatasetOrDie(dsid); + + DatasetVersion dv = datasetversionService.findByFriendlyVersionNumber(ds.getId(), versionNumber); + if (dv == null) { + return error(Status.NOT_FOUND, "Dataset version not found"); + } + dv.setArchivalCopyLocation(null); + dv = datasetversionService.merge(dv); + + return ok("Status deleted"); + + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java index ae6935945e8..f4a3c635f8b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java @@ -3,6 +3,8 @@ import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonObject; + +import java.io.StringReader; import java.io.StringWriter; import java.util.HashMap; import java.util.Map; @@ -56,4 +58,9 @@ public static String prettyPrint(javax.json.JsonObject jsonObject) { return stringWriter.toString(); } + public static javax.json.JsonObject getJsonObject(String serializedJson) { + try (StringReader rdr = new StringReader(serializedJson)) { + return Json.createReader(rdr).readObject(); + } + } } From 8c82c61565e3cab7108b5641dee0a3a80ec215c9 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 13 May 2022 15:21:02 -0400 Subject: [PATCH 02/18] flyway to update existing --- .../db/migration/V5.10.1.0.2__8605-support-archival-status.sql | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 src/main/resources/db/migration/V5.10.1.0.2__8605-support-archival-status.sql diff --git a/src/main/resources/db/migration/V5.10.1.0.2__8605-support-archival-status.sql b/src/main/resources/db/migration/V5.10.1.0.2__8605-support-archival-status.sql new file mode 100644 index 00000000000..8f2c6201a16 --- /dev/null +++ b/src/main/resources/db/migration/V5.10.1.0.2__8605-support-archival-status.sql @@ -0,0 +1,2 @@ +UPDATE datasetversion SET archivalCopyLocation = CONCAT('{"status":"success", "Message":"', archivalCopyLocation,'"}') where archivalCopyLocation is not null and not archivalCopyLocation='Attempted'; +UPDATE datasetversion SET archivalCopyLocation = CONCAT('{"status":"failure", "Message":"Attempted"}') where archivalCopyLocation is not null; From b354bc3ea530339a191e768311409ef2963c2ad3 Mon Sep 
17 00:00:00 2001 From: qqmyers Date: Fri, 13 May 2022 15:29:47 -0400 Subject: [PATCH 03/18] fix typos/mistakes --- .../migration/V5.10.1.0.2__8605-support-archival-status.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/resources/db/migration/V5.10.1.0.2__8605-support-archival-status.sql b/src/main/resources/db/migration/V5.10.1.0.2__8605-support-archival-status.sql index 8f2c6201a16..cf708ad0ea9 100644 --- a/src/main/resources/db/migration/V5.10.1.0.2__8605-support-archival-status.sql +++ b/src/main/resources/db/migration/V5.10.1.0.2__8605-support-archival-status.sql @@ -1,2 +1,2 @@ -UPDATE datasetversion SET archivalCopyLocation = CONCAT('{"status":"success", "Message":"', archivalCopyLocation,'"}') where archivalCopyLocation is not null and not archivalCopyLocation='Attempted'; -UPDATE datasetversion SET archivalCopyLocation = CONCAT('{"status":"failure", "Message":"Attempted"}') where archivalCopyLocation is not null; +UPDATE datasetversion SET archivalCopyLocation = CONCAT('{"status":"success", "message":"', archivalCopyLocation,'"}') where archivalCopyLocation is not null and not archivalCopyLocation='Attempted'; +UPDATE datasetversion SET archivalCopyLocation = CONCAT('{"status":"failure", "message":"Attempted"}') where archivalCopyLocation='Attempted'; From 9c9ac65bbc503e6c922008728fcafe79787fcb6b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 13 May 2022 16:27:48 -0400 Subject: [PATCH 04/18] basic status logging in existing archivers --- .../impl/DuraCloudSubmitToArchiveCommand.java | 16 +++++++++++++++- .../impl/GoogleCloudSubmitToArchiveCommand.java | 17 +++++++++++++++-- .../impl/LocalSubmitToArchiveCommand.java | 15 ++++++++++++++- 3 files changed, 44 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java index f30183663e6..ea348686ebd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java @@ -25,6 +25,9 @@ import java.util.Map; import java.util.logging.Logger; +import javax.json.Json; +import javax.json.JsonObjectBuilder; + import org.apache.commons.codec.binary.Hex; import org.duracloud.client.ContentStore; import org.duracloud.client.ContentStoreManager; @@ -67,6 +70,11 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t .replace('.', '-').toLowerCase(); ContentStore store; + //Set a failure status that will be updated if we succeed + JsonObjectBuilder statusObject = Json.createObjectBuilder(); + statusObject.add(DatasetVersion.STATUS, DatasetVersion.FAILURE); + statusObject.add(DatasetVersion.MESSAGE, "Bag not transferred"); + try { /* * If there is a failure in creating a space, it is likely that a prior version @@ -134,6 +142,7 @@ public void run() { bagger.generateBag(out); } catch (Exception e) { logger.severe("Error creating bag: " + e.getMessage()); + statusObject.add(DatasetVersion.MESSAGE, "Could not create bag"); // TODO Auto-generated catch block e.printStackTrace(); throw new RuntimeException("Error creating bag: " + e.getMessage()); @@ -173,7 +182,9 @@ public void run() { sb.append("/duradmin/spaces/sm/"); sb.append(store.getStoreId()); sb.append("/" + spaceName + "/" + fileName); - dv.setArchivalCopyLocation(sb.toString()); + 
statusObject.add(DatasetVersion.STATUS, DatasetVersion.SUCCESS); + statusObject.add(DatasetVersion.MESSAGE, sb.toString()); + logger.fine("DuraCloud Submission step complete: " + sb.toString()); } catch (ContentStoreException | IOException e) { // TODO Auto-generated catch block @@ -200,6 +211,9 @@ public void run() { } catch (NoSuchAlgorithmException e) { logger.severe("MD5 MessageDigest not available!"); } + finally { + dv.setArchivalCopyLocation(statusObject.build().toString()); + } } else { logger.warning("DuraCloud Submision Workflow aborted: Dataset locked for finalizePublication, or because file validation failed"); return new Failure("Dataset locked"); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java index af4c960c2d6..d12e7563a1c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java @@ -28,6 +28,9 @@ import java.util.Map; import java.util.logging.Logger; +import javax.json.Json; +import javax.json.JsonObjectBuilder; + import org.apache.commons.codec.binary.Hex; import com.google.auth.oauth2.ServiceAccountCredentials; import com.google.cloud.storage.Blob; @@ -54,6 +57,11 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t logger.fine("Project: " + projectName + " Bucket: " + bucketName); if (bucketName != null && projectName != null) { Storage storage; + //Set a failure status that will be updated if we succeed + JsonObjectBuilder statusObject = Json.createObjectBuilder(); + statusObject.add(DatasetVersion.STATUS, DatasetVersion.FAILURE); + statusObject.add(DatasetVersion.MESSAGE, "Bag not transferred"); + try { FileInputStream fis = new FileInputStream(System.getProperty("dataverse.files.directory") + System.getProperty("file.separator")+ "googlecloudkey.json"); storage = StorageOptions.newBuilder() @@ -68,7 +76,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t String spaceName = dataset.getGlobalId().asString().replace(':', '-').replace('/', '-') .replace('.', '-').toLowerCase(); - + DataCitation dc = new DataCitation(dv); Map metadata = dc.getDataCiteMetadata(); String dataciteXml = DOIDataCiteRegisterService.getMetadataFromDvObject( @@ -125,6 +133,7 @@ public void run() { bagger.setAuthenticationKey(token.getTokenString()); bagger.generateBag(out); } catch (Exception e) { + statusObject.add(DatasetVersion.MESSAGE, "Could not create bag"); logger.severe("Error creating bag: " + e.getMessage()); // TODO Auto-generated catch block e.printStackTrace(); @@ -203,7 +212,9 @@ public void run() { StringBuffer sb = new StringBuffer("https://console.cloud.google.com/storage/browser/"); sb.append(blobIdString); - dv.setArchivalCopyLocation(sb.toString()); + statusObject.add(DatasetVersion.STATUS, DatasetVersion.SUCCESS); + statusObject.add(DatasetVersion.MESSAGE, sb.toString()); + } catch (RuntimeException rte) { logger.severe("Error creating datacite xml file during GoogleCloud Archiving: " + rte.getMessage()); return new Failure("Error in generating datacite.xml file", @@ -219,6 +230,8 @@ public void run() { return new Failure("GoogleCloud Submission Failure", e.getLocalizedMessage() + ": check log for details"); + } finally { + dv.setArchivalCopyLocation(statusObject.build().toString()); } return 
WorkflowStepResult.OK;
     } else {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java
index b336d9a77f9..b4555db287c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java
@@ -19,6 +19,9 @@ import java.util.Map;
 import java.util.logging.Logger;
 
+import javax.json.Json;
+import javax.json.JsonObjectBuilder;
+
 import java.io.File;
 import java.io.FileOutputStream;
 
@@ -39,6 +42,12 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
         logger.fine("In LocalCloudSubmitToArchive...");
         String localPath = requestedSettings.get(":BagItLocalPath");
         String zipName = null;
+
+        //Set a failure status that will be updated if we succeed
+        JsonObjectBuilder statusObject = Json.createObjectBuilder();
+        statusObject.add(DatasetVersion.STATUS, DatasetVersion.FAILURE);
+        statusObject.add(DatasetVersion.MESSAGE, "Bag not transferred");
+
         try {
 
             Dataset dataset = dv.getDataset();
@@ -68,7 +77,8 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
 
                 if (srcFile.renameTo(destFile)) {
                     logger.fine("Localhost Submission step: Content Transferred");
-                    dv.setArchivalCopyLocation("file://" + zipName);
+                    statusObject.add(DatasetVersion.STATUS, DatasetVersion.SUCCESS);
+                    statusObject.add(DatasetVersion.MESSAGE, "file://" + zipName);
                 } else {
                     logger.warning("Unable to move " + zipName + ".partial to " + zipName);
                 }
@@ -80,7 +90,10 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t
         } catch (Exception e) {
             logger.warning("Failed to archive " + zipName + " : " + e.getLocalizedMessage());
             e.printStackTrace();
+        } finally {
+            dv.setArchivalCopyLocation(statusObject.build().toString());
         }
+
         return WorkflowStepResult.OK;
     }

From 221ca0b041b1cbf002f8c2de246ed6360e0da14c Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Fri, 13 May 2022 16:48:26 -0400
Subject: [PATCH 05/18] API docs

---
 doc/sphinx-guides/source/api/native-api.rst | 55 +++++++++++++++++++++
 1 file changed, 55 insertions(+)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 5c56166dd6a..8026039b7ca 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -1873,6 +1873,61 @@ The API call requires a Json body that includes the list of the fileIds that the
     export JSON='{"fileIds":[300,301]}'
 
     curl -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type:application/json" "$SERVER_URL/api/datasets/:persistentId/files/actions/:unset-embargo?persistentId=$PERSISTENT_IDENTIFIER" -d "$JSON"
+
+
+Get the Archival Status of a Dataset By Version
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Archiving is an optional feature that may be configured for a Dataverse instance. When enabled, this API call can be used to retrieve the status. Note that this requires "superuser" credentials.
+
+/api/datasets/submitDatasetVersionToArchive/$dataset-id/$version/status returns the archival status of the specified dataset version.
+
+The response is a Json object that will contain a "status" which may be "success", "pending", or "failure" and a "message" which is archive system specific. For "success" the message should provide an identifier or link to the archival copy. For example:
+
+.. code-block:: bash
+
+    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+    export SERVER_URL=https://demo.dataverse.org
+    export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/7U7YBV
+    export VERSION=1.0
+
+    curl -H "X-Dataverse-key: $API_TOKEN" -H "Accept:application/json" "$SERVER_URL/api/datasets/submitDatasetVersionToArchive/$VERSION/status?persistentId=$PERSISTENT_IDENTIFIER"
+
+Set the Archival Status of a Dataset By Version
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Archiving is an optional feature that may be configured for a Dataverse instance. When enabled, this API call can be used to set the status. Note that this is intended to be used by the archival system and requires "superuser" credentials.
+
+/api/datasets/submitDatasetVersionToArchive/$dataset-id/$version/status sets the archival status of the specified dataset version.
+
+The body is a Json object that must contain a "status" which may be "success", "pending", or "failure" and a "message" which is archive system specific. For "success" the message should provide an identifier or link to the archival copy. For example:
+
+.. code-block:: bash
+
+    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+    export SERVER_URL=https://demo.dataverse.org
+    export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/7U7YBV
+    export VERSION=1.0
+    export JSON='{"status":"failure","message":"Something went wrong"}'
+
+    curl -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type:application/json" -X PUT "$SERVER_URL/api/datasets/submitDatasetVersionToArchive/$VERSION/status?persistentId=$PERSISTENT_IDENTIFIER" -d "$JSON"
+
+Delete the Archival Status of a Dataset By Version
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Archiving is an optional feature that may be configured for a Dataverse instance. When enabled, this API call can be used to delete the status. Note that this is intended to be used by the archival system and requires "superuser" credentials.
+
+/api/datasets/submitDatasetVersionToArchive/$dataset-id/$version/status deletes the archival status of the specified dataset version.
+
+.. code-block:: bash
+
+    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+    export SERVER_URL=https://demo.dataverse.org
+    export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/7U7YBV
+    export VERSION=1.0
+
+    curl -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type:application/json" -X DELETE "$SERVER_URL/api/datasets/submitDatasetVersionToArchive/$VERSION/status?persistentId=$PERSISTENT_IDENTIFIER"
+
 Files
 -----

From cefa12c710e4b22504f36e4ffad5e2a179bf3657 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Thu, 26 May 2022 15:52:46 -0400
Subject: [PATCH 06/18] rename flyway

---
 ...val-status.sql => V5.10.1.3__8605-support-archival-status.sql} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename src/main/resources/db/migration/{V5.10.1.0.2__8605-support-archival-status.sql => V5.10.1.3__8605-support-archival-status.sql} (100%)

diff --git a/src/main/resources/db/migration/V5.10.1.0.2__8605-support-archival-status.sql b/src/main/resources/db/migration/V5.10.1.3__8605-support-archival-status.sql
similarity index 100%
rename from src/main/resources/db/migration/V5.10.1.0.2__8605-support-archival-status.sql
rename to src/main/resources/db/migration/V5.10.1.3__8605-support-archival-status.sql

From d3a7b04e75b436af3991b93e5f7205fd9a6ad169 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Thu, 14 Jul 2022 12:06:45 -0400
Subject: [PATCH 07/18] update flyway naming

---
 ...val-status.sql => V5.11.0.1__8605-support-archival-status.sql} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename src/main/resources/db/migration/{V5.10.1.3__8605-support-archival-status.sql => V5.11.0.1__8605-support-archival-status.sql} (100%)

diff --git a/src/main/resources/db/migration/V5.10.1.3__8605-support-archival-status.sql b/src/main/resources/db/migration/V5.11.0.1__8605-support-archival-status.sql
similarity index 100%
rename from src/main/resources/db/migration/V5.10.1.3__8605-support-archival-status.sql
rename to src/main/resources/db/migration/V5.11.0.1__8605-support-archival-status.sql

From 9223e7df02f9f829a0e333247971e57706803aa7 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Fri, 15 Jul 2022 17:21:48 -0400
Subject: [PATCH 08/18] updates per review

---
 doc/sphinx-guides/source/api/native-api.rst   |  18 ++--
 .../harvard/iq/dataverse/DatasetVersion.java  |  19 ++--
 .../harvard/iq/dataverse/api/Datasets.java    |  93 ++++++++--------
 .../impl/DuraCloudSubmitToArchiveCommand.java |  10 +-
 .../GoogleCloudSubmitToArchiveCommand.java    |  10 +-
 .../impl/LocalSubmitToArchiveCommand.java     |   8 +-
 .../harvard/iq/dataverse/api/DatasetsIT.java  | 100 +++++++++++++++++-
 .../edu/harvard/iq/dataverse/api/UtilIT.java  |  64 ++++++++++-
 8 files changed, 242 insertions(+), 80 deletions(-)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index c67ad3e1541..cab9a9bdf06 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -1878,9 +1878,9 @@ The API call requires a Json body that includes the list of the fileIds that the
 Get the Archival Status of a Dataset By Version
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Archiving is an optional feature that may be configured for a Dataverse instance. When enabled, this API call can be used to retrieve the status. Note that this requires "superuser" credentials.
+Archiving is an optional feature that may be configured for a Dataverse instance. When that is enabled, this API call can be used to retrieve the status. Note that this requires "superuser" credentials.
 
-/api/datasets/submitDatasetVersionToArchive/$dataset-id/$version/status returns the archival status of the specified dataset version.
+/api/datasets/$dataset-id/$version/archivalStatus returns the archival status of the specified dataset version.
 
 The response is a Json object that will contain a "status" which may be "success", "pending", or "failure" and a "message" which is archive system specific. For "success" the message should provide an identifier or link to the archival copy. For example:
 
@@ -1891,14 +1891,14 @@ The response is a Json object that will contain a "success
     export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/7U7YBV
     export VERSION=1.0
 
-    curl -H "X-Dataverse-key: $API_TOKEN" -H "Accept:application/json" "$SERVER_URL/api/datasets/submitDatasetVersionToArchive/$VERSION/status?persistentId=$PERSISTENT_IDENTIFIER"
+    curl -H "X-Dataverse-key: $API_TOKEN" -H "Accept:application/json" "$SERVER_URL/api/datasets/:persistentId/$VERSION/archivalStatus?persistentId=$PERSISTENT_IDENTIFIER"
 
 Set the Archival Status of a Dataset By Version
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Archiving is an optional feature that may be configured for a Dataverse instance. When enabled, this API call can be used to set the status. Note that this is intended to be used by the archival system and requires "superuser" credentials.
+Archiving is an optional feature that may be configured for a Dataverse instance. When that is enabled, this API call can be used to set the status. Note that this is intended to be used by the archival system and requires "superuser" credentials.
 
-/api/datasets/submitDatasetVersionToArchive/$dataset-id/$version/status sets the archival status of the specified dataset version.
+/api/datasets/$dataset-id/$version/archivalStatus sets the archival status of the specified dataset version.
 
 The body is a Json object that must contain a "status" which may be "success", "pending", or "failure" and a "message" which is archive system specific. For "success" the message should provide an identifier or link to the archival copy. For example:
 
@@ -1910,14 +1910,14 @@ The body is a Json object that must contain a "status" which may be "success", "
     export VERSION=1.0
     export JSON='{"status":"failure","message":"Something went wrong"}'
 
-    curl -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type:application/json" -X PUT "$SERVER_URL/api/datasets/submitDatasetVersionToArchive/$VERSION/status?persistentId=$PERSISTENT_IDENTIFIER" -d "$JSON"
+    curl -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type:application/json" -X PUT "$SERVER_URL/api/datasets/:persistentId/$VERSION/archivalStatus?persistentId=$PERSISTENT_IDENTIFIER" -d "$JSON"
 
 Delete the Archival Status of a Dataset By Version
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Archiving is an optional feature that may be configured for a Dataverse instance. When enabled, this API call can be used to delete the status. Note that this is intended to be used by the archival system and requires "superuser" credentials.
+Archiving is an optional feature that may be configured for a Dataverse instance. When that is enabled, this API call can be used to delete the status. Note that this is intended to be used by the archival system and requires "superuser" credentials.
 
-/api/datasets/submitDatasetVersionToArchive/$dataset-id/$version/status deletes the archival status of the specified dataset version.
+/api/datasets/$dataset-id/$version/archivalStatus deletes the archival status of the specified dataset version.
 
 ..
code-block:: bash @@ -1926,7 +1926,7 @@ Archiving is an optional feature that may be configured for a Dataverse instance export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/7U7YBV export VERSION=1.0 - curl -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type:application/json" -X DELETE "$SERVER_URL/api/datasets/submitDatasetVersionToArchive/$VERSION/status?persistentId=$PERSISTENT_IDENTIFIER" + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/:persistentId/$VERSION/archivalStatus?persistentId=$PERSISTENT_IDENTIFIER" Files diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index d69b1d5ca8e..025628067d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -96,12 +96,12 @@ public enum VersionState { public static final int VERSION_NOTE_MAX_LENGTH = 1000; //Archival copies: Status message required components - public static final String STATUS = "status"; - public static final String MESSAGE = "message"; + public static final String ARCHIVAL_STATUS = "status"; + public static final String ARCHIVAL_STATUS_MESSAGE = "message"; //Archival Copies: Allowed Statuses - public static final String PENDING = "pending"; - public static final String SUCCESS = "success"; - public static final String FAILURE = "failure"; + public static final String ARCHIVAL_STATUS_PENDING = "pending"; + public static final String ARCHIVAL_STATUS_SUCCESS = "success"; + public static final String ARCHIVAL_STATUS_FAILURE = "failure"; @Id @GeneratedValue(strategy = GenerationType.IDENTITY) @@ -161,6 +161,11 @@ public enum VersionState { // removed pending further investigation (v4.13) private String archiveNote; + // Originally a simple string indicating the location of the archival copy. As + // of v5.12, repurposed to provide a more general json archival status (failure, + // pending, success) and message (serialized as a string). The archival copy + // location is now expected as the contents of the message for the status + // 'success'. 
See the /api/datasets/{id}/{version}/archivalStatus API calls for more details @Column(nullable=true, columnDefinition = "TEXT") private String archivalCopyLocation; @@ -335,14 +340,14 @@ public String getArchivalCopyLocationStatus() { populateArchivalStatus(false); if(archivalStatus!=null) { - return archivalStatus.getString(STATUS); + return archivalStatus.getString(ARCHIVAL_STATUS); } return null; } public String getArchivalCopyLocationMessage() { populateArchivalStatus(false); if(archivalStatus!=null) { - return archivalStatus.getString(MESSAGE); + return archivalStatus.getString(ARCHIVAL_STATUS_MESSAGE); } return null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index eac4a8f0d44..fdfed163369 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2263,7 +2263,7 @@ public Response completeMPUpload(String partETagBody, @QueryParam("globalid") St eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo))); } for(PartETag et: eTagList) { - logger.info("Part: " + et.getPartNumber() + " : " + et.getETag()); + logger.fine("Part: " + et.getPartNumber() + " : " + et.getETag()); } } catch (JsonException je) { logger.info("Unable to parse eTags from: " + partETagBody); @@ -2528,7 +2528,7 @@ public Command handleLatestPublished() { if ( dsv == null || dsv.getId() == null ) { throw new WrappedResponse( notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found") ); } - if (dsv.isReleased()) { + if (dsv.isReleased()&& uriInfo!=null) { MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, ds); mdcLogService.logEntry(entry); } @@ -3286,27 +3286,28 @@ public Response getCurationStates() throws WrappedResponse { csvSB.append("\n"); return ok(csvSB.toString(), MediaType.valueOf(FileUtil.MIME_TYPE_CSV), "datasets.status.csv"); } - - //APIs to manage archival status - + + // APIs to manage archival status + @GET @Produces(MediaType.APPLICATION_JSON) - @Path("/submitDatasetVersionToArchive/{id}/{version}/status") - public Response getDatasetVersionToArchiveStatus(@PathParam("id") String dsid, - @PathParam("version") String versionNumber) { + @Path("/{id}/{version}/archivalStatus") + public Response getDatasetVersionArchivalStatus(@PathParam("id") String datasetId, + @PathParam("version") String versionNumber, @Context UriInfo uriInfo, @Context HttpHeaders headers) { try { AuthenticatedUser au = findAuthenticatedUserOrDie(); if (!au.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); } - Dataset ds = findDatasetOrDie(dsid); - - DatasetVersion dv = datasetversionService.findByFriendlyVersionNumber(ds.getId(), versionNumber); - if (dv.getArchivalCopyLocation() == null) { + DataverseRequest req = createDataverseRequest(au); + DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, + headers); + + if (dsv.getArchivalCopyLocation() == null) { return error(Status.NO_CONTENT, "This dataset version has not been archived"); } else { - JsonObject status = JsonUtil.getJsonObject(dv.getArchivalCopyLocation()); + JsonObject status = JsonUtil.getJsonObject(dsv.getArchivalCopyLocation()); return ok(status); } } catch (WrappedResponse wr) { @@ -3316,72 +3317,68 @@ public Response getDatasetVersionToArchiveStatus(@PathParam("id") String dsid, @PUT 
@Consumes(MediaType.APPLICATION_JSON) - @Path("/submitDatasetVersionToArchive/{id}/{version}/status") - public Response setDatasetVersionToArchiveStatus(@PathParam("id") String dsid, - @PathParam("version") String versionNumber, JsonObject update) { + @Path("/{id}/{version}/archivalStatus") + public Response setDatasetVersionArchivalStatus(@PathParam("id") String datasetId, + @PathParam("version") String versionNumber, JsonObject update, @Context UriInfo uriInfo, + @Context HttpHeaders headers) { - logger.info(JsonUtil.prettyPrint(update)); + logger.fine(JsonUtil.prettyPrint(update)); try { AuthenticatedUser au = findAuthenticatedUserOrDie(); if (!au.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); } - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - if (update.containsKey(DatasetVersion.STATUS) - && update.containsKey(DatasetVersion.MESSAGE)) { - String status = update.getString(DatasetVersion.STATUS); - if (status.equals(DatasetVersion.PENDING) - || status.equals(DatasetVersion.FAILURE) - || status.equals(DatasetVersion.SUCCESS)) { - try { - Dataset ds; + if (update.containsKey(DatasetVersion.ARCHIVAL_STATUS) && update.containsKey(DatasetVersion.ARCHIVAL_STATUS_MESSAGE)) { + String status = update.getString(DatasetVersion.ARCHIVAL_STATUS); + if (status.equals(DatasetVersion.ARCHIVAL_STATUS_PENDING) || status.equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE) + || status.equals(DatasetVersion.ARCHIVAL_STATUS_SUCCESS)) { - ds = findDatasetOrDie(dsid); + DataverseRequest req = createDataverseRequest(au); + DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), + uriInfo, headers); - DatasetVersion dv = datasetversionService.findByFriendlyVersionNumber(ds.getId(), versionNumber); - if(dv==null) { + if (dsv == null) { return error(Status.NOT_FOUND, "Dataset version not found"); } - dv.setArchivalCopyLocation(JsonUtil.prettyPrint(update)); - dv = datasetversionService.merge(dv); - logger.info("location now: " + dv.getArchivalCopyLocation()); - logger.info("status now: " + dv.getArchivalCopyLocationStatus()); - logger.info("message now: " + dv.getArchivalCopyLocationMessage()); - - return ok("Status updated"); + dsv.setArchivalCopyLocation(JsonUtil.prettyPrint(update)); + dsv = datasetversionService.merge(dsv); + logger.info("location now: " + dsv.getArchivalCopyLocation()); + logger.info("status now: " + dsv.getArchivalCopyLocationStatus()); + logger.info("message now: " + dsv.getArchivalCopyLocationMessage()); - } catch (WrappedResponse wr) { - return wr.getResponse(); + return ok("Status updated"); } } + } catch (WrappedResponse wr) { + return wr.getResponse(); } + return error(Status.BAD_REQUEST, "Unacceptable status format"); } @DELETE @Produces(MediaType.APPLICATION_JSON) - @Path("/submitDatasetVersionToArchive/{id}/{version}/status") - public Response deleteDatasetVersionToArchiveStatus(@PathParam("id") String dsid, - @PathParam("version") String versionNumber) { + @Path("/{id}/{version}/archivalStatus") + public Response deleteDatasetVersionArchivalStatus(@PathParam("id") String datasetId, + @PathParam("version") String versionNumber, @Context UriInfo uriInfo, @Context HttpHeaders headers) { try { AuthenticatedUser au = findAuthenticatedUserOrDie(); if (!au.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); } - Dataset ds = findDatasetOrDie(dsid); - DatasetVersion dv = datasetversionService.findByFriendlyVersionNumber(ds.getId(), versionNumber); - if (dv == null) { + DataverseRequest 
req = createDataverseRequest(au); + DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, + headers); + if (dsv == null) { return error(Status.NOT_FOUND, "Dataset version not found"); } - dv.setArchivalCopyLocation(null); - dv = datasetversionService.merge(dv); + dsv.setArchivalCopyLocation(null); + dsv = datasetversionService.merge(dsv); return ok("Status deleted"); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java index ea348686ebd..5629a9621d5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java @@ -72,8 +72,8 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t ContentStore store; //Set a failure status that will be updated if we succeed JsonObjectBuilder statusObject = Json.createObjectBuilder(); - statusObject.add(DatasetVersion.STATUS, DatasetVersion.FAILURE); - statusObject.add(DatasetVersion.MESSAGE, "Bag not transferred"); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS, DatasetVersion.ARCHIVAL_STATUS_FAILURE); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS_MESSAGE, "Bag not transferred"); try { /* @@ -142,7 +142,7 @@ public void run() { bagger.generateBag(out); } catch (Exception e) { logger.severe("Error creating bag: " + e.getMessage()); - statusObject.add(DatasetVersion.MESSAGE, "Could not create bag"); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS_MESSAGE, "Could not create bag"); // TODO Auto-generated catch block e.printStackTrace(); throw new RuntimeException("Error creating bag: " + e.getMessage()); @@ -182,8 +182,8 @@ public void run() { sb.append("/duradmin/spaces/sm/"); sb.append(store.getStoreId()); sb.append("/" + spaceName + "/" + fileName); - statusObject.add(DatasetVersion.STATUS, DatasetVersion.SUCCESS); - statusObject.add(DatasetVersion.MESSAGE, sb.toString()); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS, DatasetVersion.ARCHIVAL_STATUS_SUCCESS); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS_MESSAGE, sb.toString()); logger.fine("DuraCloud Submission step complete: " + sb.toString()); } catch (ContentStoreException | IOException e) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java index d12e7563a1c..5c4d25c94ec 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java @@ -59,8 +59,8 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t Storage storage; //Set a failure status that will be updated if we succeed JsonObjectBuilder statusObject = Json.createObjectBuilder(); - statusObject.add(DatasetVersion.STATUS, DatasetVersion.FAILURE); - statusObject.add(DatasetVersion.MESSAGE, "Bag not transferred"); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS, DatasetVersion.ARCHIVAL_STATUS_FAILURE); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS_MESSAGE, "Bag not transferred"); try { FileInputStream fis = new FileInputStream(System.getProperty("dataverse.files.directory") + System.getProperty("file.separator")+ 
"googlecloudkey.json"); @@ -133,7 +133,7 @@ public void run() { bagger.setAuthenticationKey(token.getTokenString()); bagger.generateBag(out); } catch (Exception e) { - statusObject.add(DatasetVersion.MESSAGE, "Could not create bag"); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS_MESSAGE, "Could not create bag"); logger.severe("Error creating bag: " + e.getMessage()); // TODO Auto-generated catch block e.printStackTrace(); @@ -212,8 +212,8 @@ public void run() { StringBuffer sb = new StringBuffer("https://console.cloud.google.com/storage/browser/"); sb.append(blobIdString); - statusObject.add(DatasetVersion.STATUS, DatasetVersion.SUCCESS); - statusObject.add(DatasetVersion.MESSAGE, sb.toString()); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS, DatasetVersion.ARCHIVAL_STATUS_SUCCESS); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS_MESSAGE, sb.toString()); } catch (RuntimeException rte) { logger.severe("Error creating datacite xml file during GoogleCloud Archiving: " + rte.getMessage()); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java index b4555db287c..c12bdc63981 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java @@ -45,8 +45,8 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t //Set a failure status that will be updated if we succeed JsonObjectBuilder statusObject = Json.createObjectBuilder(); - statusObject.add(DatasetVersion.STATUS, DatasetVersion.FAILURE); - statusObject.add(DatasetVersion.MESSAGE, "Bag not transferred"); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS, DatasetVersion.ARCHIVAL_STATUS_FAILURE); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS_MESSAGE, "Bag not transferred"); try { @@ -77,8 +77,8 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t if (srcFile.renameTo(destFile)) { logger.fine("Localhost Submission step: Content Transferred"); - statusObject.add(DatasetVersion.STATUS, DatasetVersion.SUCCESS); - statusObject.add(DatasetVersion.MESSAGE, "file://" + zipName); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS, DatasetVersion.ARCHIVAL_STATUS_SUCCESS); + statusObject.add(DatasetVersion.ARCHIVAL_STATUS_MESSAGE, "file://" + zipName); } else { logger.warning("Unable to move " + zipName + ".partial to " + zipName); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 5a2197af001..77a7a499961 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -57,6 +57,8 @@ import javax.json.Json; import javax.json.JsonArray; import javax.json.JsonObjectBuilder; +import javax.ws.rs.core.Response.Status; + import static javax.ws.rs.core.Response.Status.NO_CONTENT; import static javax.ws.rs.core.Response.Status.OK; import javax.xml.stream.XMLInputFactory; @@ -2809,5 +2811,101 @@ public void testRestrictFileTermsOfUseAndAccess() throws IOException { disallowRequestAccess.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); } - + + /** + * In this test we do CRUD of archivalStatus (Note this and other archiving + * related tests are part of + * https://github.com/harvard-lts/hdc-integration-tests) + * + * This test requires the 
root dataverse to be published to pass. + */ + @Test + public void testArchivalStatusAPI() throws IOException { + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + assertEquals(200, createUser.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + Response makeSuperUser = UtilIT.makeSuperUser(username); + assertEquals(200, makeSuperUser.getStatusCode()); + + Response createNoAccessUser = UtilIT.createRandomUser(); + createNoAccessUser.prettyPrint(); + String apiTokenNoAccess = UtilIT.getApiTokenFromResponse(createNoAccessUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.prettyPrint(); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.prettyPrint(); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + Response getDatasetJsonBeforePublishing = UtilIT.nativeGet(datasetId, apiToken); + getDatasetJsonBeforePublishing.prettyPrint(); + String protocol = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.protocol"); + String authority = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()) + .getString("data.authority"); + String identifier = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()) + .getString("data.identifier"); + String datasetPersistentId = protocol + ":" + authority + "/" + identifier; + + Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); + assertEquals(200, publishDataverse.getStatusCode()); + + logger.info("Attempting to publish a major version"); + // Return random sleep 9/13/2019 + // Without it we've seen some DB deadlocks + // 3 second sleep, to allow the indexing to finish: + + try { + Thread.sleep(3000l); + } catch (InterruptedException iex) { + } + + Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); + assertEquals(200, publishDataset.getStatusCode()); + + // Now change the title + Response response = UtilIT.updateDatasetJsonLDMetadata(datasetId, apiToken, + "{\"title\": \"New Title\", \"@context\":{\"title\": \"http://purl.org/dc/terms/title\"}}", true); + response.then().assertThat().statusCode(OK.getStatusCode()); + + int status = Status.CONFLICT.getStatusCode(); + while (status == Status.CONFLICT.getStatusCode()) { + + Response publishV2 = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); + status = response.thenReturn().statusCode(); + } + assertEquals(OK.getStatusCode(), status); + + if (!UtilIT.sleepForReindex(datasetPersistentId, apiToken, 3000)) { + logger.info("Still indexing after 3 seconds"); + } + + //Verify the status is empty + Response nullStatus = UtilIT.getDatasetVersionArchivalStatus(datasetId, "1.0", apiToken); + nullStatus.then().assertThat().statusCode(NO_CONTENT.getStatusCode()); + + //Set it + Response setStatus = UtilIT.setDatasetVersionArchivalStatus(datasetId, "1.0", apiToken, "pending", + "almost there"); + setStatus.then().assertThat().statusCode(OK.getStatusCode()); + + //Get it + Response getStatus = UtilIT.getDatasetVersionArchivalStatus(datasetId, "1.0", apiToken); + getStatus.then().assertThat().body("data.status", equalTo("pending")).body("data.message", + equalTo("almost there")); + + 
//Delete it + Response deleteStatus = UtilIT.deleteDatasetVersionArchivalStatus(datasetId, "1.0", apiToken); + deleteStatus.then().assertThat().statusCode(OK.getStatusCode()); + + //Make sure it's gone + Response nullStatus2 = UtilIT.getDatasetVersionArchivalStatus(datasetId, "1.0", apiToken); + nullStatus2.then().assertThat().statusCode(NO_CONTENT.getStatusCode()); + + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 19b94f34db7..b796c64b395 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1194,12 +1194,20 @@ static Response publishDatasetViaSword(String persistentId, String apiToken) { } static Response publishDatasetViaNativeApi(String idOrPersistentId, String majorOrMinor, String apiToken) { + return publishDatasetViaNativeApi(idOrPersistentId, majorOrMinor, apiToken, false); + } + + static Response publishDatasetViaNativeApi(String idOrPersistentId, String majorOrMinor, String apiToken, boolean mustBeIndexed) { + String idInPath = idOrPersistentId; // Assume it's a number. String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path. - if (!NumberUtils.isNumber(idOrPersistentId)) { + if (!NumberUtils.isCreatable(idOrPersistentId)) { idInPath = ":persistentId"; optionalQueryParam = "&persistentId=" + idOrPersistentId; } + if(mustBeIndexed) { + optionalQueryParam = optionalQueryParam+"&assureIsIndexed=true"; + } RequestSpecification requestSpecification = given(); if (apiToken != null) { requestSpecification = given() @@ -2384,6 +2392,27 @@ static Boolean sleepForLock(String idOrPersistentId, String lockType, String api } + static Boolean sleepForReindex(String idOrPersistentId, String apiToken, int duration) { + int i = 0; + Response timestampResponse; + do { + timestampResponse = UtilIT.getDatasetTimestamps(idOrPersistentId, apiToken); + try { + Thread.sleep(200); + i++; + if (i > duration) { + break; + } + } catch (InterruptedException ex) { + Logger.getLogger(UtilIT.class.getName()).log(Level.SEVERE, null, ex); + } + } while (timestampResponse.body().jsonPath().getBoolean("hasStaleIndex")); + + return i <= duration; + + } + + //Helper function that returns true if a given search returns a non-zero response within a fixed time limit // a given duration returns false if still zero results after given duration static Boolean sleepForSearch(String searchPart, String apiToken, String subTree, int duration) { @@ -2474,6 +2503,20 @@ static Response unlockDataset(long datasetId, String lockType, String apiToken) return response; } + static Response getDatasetTimestamps(String idOrPersistentId, String apiToken) { + String idInPath = idOrPersistentId; // Assume it's a number. + String queryParams = ""; // If idOrPersistentId is a number we'll just put it in the path. 
+ if (!NumberUtils.isCreatable(idOrPersistentId)) { + idInPath = ":persistentId"; + queryParams = "?persistentId=" + idOrPersistentId; + } + + Response response = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("api/datasets/" + idInPath + "/timestamps" + queryParams); + return response; + } + static Response exportOaiSet(String setName) { String apiPath = String.format("/api/admin/metadata/exportOAI/%s", setName); return given().put(apiPath); @@ -2866,6 +2909,25 @@ static Response setDatasetCurationLabel(Integer datasetId, String apiToken, Stri .put("/api/datasets/" + datasetId + "/curationStatus?label=" + label); return response; } + + static Response getDatasetVersionArchivalStatus(Integer datasetId, String version, String apiToken) { + Response response = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/" + datasetId + "/" + version); + return response; + } + static Response setDatasetVersionArchivalStatus(Integer datasetId, String version, String apiToken, String status, String message) { + Response response = given() + .header(API_TOKEN_HTTP_HEADER, apiToken).body("{\"status\":\"" + status + "\", \"message\":\"" + message + "\"}") + .put("/api/datasets/" + datasetId + "/" + version); + return response; + } + static Response deleteDatasetVersionArchivalStatus(Integer datasetId, String version, String apiToken) { + Response response = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .delete("/api/datasets/" + datasetId + "/" + version); + return response; + } private static DatasetField constructPrimitive(String fieldName, String value) { DatasetField field = new DatasetField(); From f5396d85f92f8b497038cda9ca33886f26468911 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 15 Jul 2022 18:38:05 -0400 Subject: [PATCH 09/18] swap native update --- .../edu/harvard/iq/dataverse/api/DatasetsIT.java | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 77a7a499961..4921bd882f8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -2867,16 +2867,23 @@ public void testArchivalStatusAPI() throws IOException { Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); assertEquals(200, publishDataset.getStatusCode()); + String pathToJsonFileSingle = "doc/sphinx-guides/source/_static/api/dataset-simple-update-metadata.json"; + Response addSubjectSingleViaNative = UtilIT.updateFieldLevelDatasetMetadataViaNative(datasetPersistentId, pathToJsonFileSingle, apiToken); + addSubjectSingleViaNative.prettyPrint(); + addSubjectSingleViaNative.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Now change the title - Response response = UtilIT.updateDatasetJsonLDMetadata(datasetId, apiToken, - "{\"title\": \"New Title\", \"@context\":{\"title\": \"http://purl.org/dc/terms/title\"}}", true); - response.then().assertThat().statusCode(OK.getStatusCode()); +// Response response = UtilIT.updateDatasetJsonLDMetadata(datasetId, apiToken, +// "{\"title\": \"New Title\", \"@context\":{\"title\": \"http://purl.org/dc/terms/title\"}}", true); +// response.then().assertThat().statusCode(OK.getStatusCode()); int status = Status.CONFLICT.getStatusCode(); while (status == Status.CONFLICT.getStatusCode()) { Response publishV2 = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", 
apiToken); - status = response.thenReturn().statusCode(); + status = publishV2.thenReturn().statusCode(); } assertEquals(OK.getStatusCode(), status); From 8750e62456c19a21467db9b690cd8e457ddf324d Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 18 Jul 2022 17:20:41 -0400 Subject: [PATCH 10/18] missed logger.fine --- src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 025628067d9..510cb2866e8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -359,7 +359,7 @@ private void populateArchivalStatus(boolean force) { archivalStatus = JsonUtil.getJsonObject(archivalCopyLocation); } catch(Exception e) { logger.warning("DatasetVersion id: " + id + "has a non-JsonObject value, parsing error: " + e.getMessage()); - logger.info(archivalCopyLocation); + logger.fine(archivalCopyLocation); } } } From 5d617f0fbd49e12b8da6d36efdf4b886dbec698e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 19 Jul 2022 14:26:57 -0400 Subject: [PATCH 11/18] test tweak --- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index b796c64b395..c8fb8abc183 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2392,7 +2392,7 @@ static Boolean sleepForLock(String idOrPersistentId, String lockType, String api } - static Boolean sleepForReindex(String idOrPersistentId, String apiToken, int duration) { + static boolean sleepForReindex(String idOrPersistentId, String apiToken, int duration) { int i = 0; Response timestampResponse; do { From 8fcb59c89fd454fc0dfffe894255c9bee8a2ccfa Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 19 Jul 2022 14:47:24 -0400 Subject: [PATCH 12/18] fix jsonpath --- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index c8fb8abc183..248287111d7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2397,6 +2397,7 @@ static boolean sleepForReindex(String idOrPersistentId, String apiToken, int dur Response timestampResponse; do { timestampResponse = UtilIT.getDatasetTimestamps(idOrPersistentId, apiToken); + try { Thread.sleep(200); i++; @@ -2406,7 +2407,7 @@ static boolean sleepForReindex(String idOrPersistentId, String apiToken, int dur } catch (InterruptedException ex) { Logger.getLogger(UtilIT.class.getName()).log(Level.SEVERE, null, ex); } - } while (timestampResponse.body().jsonPath().getBoolean("hasStaleIndex")); + } while (timestampResponse.body().jsonPath().getBoolean("data.hasStaleIndex")); return i <= duration; From d2d817e9f70c39d03af65956c2d2b1034357d382 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 19 Jul 2022 15:54:26 -0400 Subject: [PATCH 13/18] fix URLs --- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java 
From d2d817e9f70c39d03af65956c2d2b1034357d382 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Tue, 19 Jul 2022 15:54:26 -0400
Subject: [PATCH 13/18] fix URLs

---
 src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 248287111d7..03cb85f357e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -2914,19 +2914,19 @@ static Response setDatasetCurationLabel(Integer datasetId, String apiToken, Stri
     static Response getDatasetVersionArchivalStatus(Integer datasetId, String version, String apiToken) {
         Response response = given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
-                .get("/api/datasets/" + datasetId + "/" + version);
+                .get("/api/datasets/" + datasetId + "/" + version + "/archivalStatus");
         return response;
     }
     static Response setDatasetVersionArchivalStatus(Integer datasetId, String version, String apiToken, String status, String message) {
         Response response = given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken).body("{\"status\":\"" + status + "\", \"message\":\"" + message + "\"}")
-                .put("/api/datasets/" + datasetId + "/" + version);
+                .put("/api/datasets/" + datasetId + "/" + version + "/archivalStatus");
         return response;
     }
     static Response deleteDatasetVersionArchivalStatus(Integer datasetId, String version, String apiToken) {
         Response response = given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
-                .delete("/api/datasets/" + datasetId + "/" + version);
+                .delete("/api/datasets/" + datasetId + "/" + version + "/archivalStatus");
         return response;
     }

From 6a70d4247cc454e369025cb10638f85a520ccafe Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Tue, 19 Jul 2022 16:01:55 -0400
Subject: [PATCH 14/18] add content type on set

---
 src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 03cb85f357e..d30ad8725cb 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -2919,7 +2919,7 @@ static Response getDatasetVersionArchivalStatus(Integer datasetId, String versio
     }
     static Response setDatasetVersionArchivalStatus(Integer datasetId, String version, String apiToken, String status, String message) {
         Response response = given()
-                .header(API_TOKEN_HTTP_HEADER, apiToken).body("{\"status\":\"" + status + "\", \"message\":\"" + message + "\"}")
+                .header(API_TOKEN_HTTP_HEADER, apiToken).contentType("application/ld+json; charset=utf-8").body("{\"status\":\"" + status + "\", \"message\":\"" + message + "\"}")
                 .put("/api/datasets/" + datasetId + "/" + version + "/archivalStatus");
         return response;
     }
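One caveat about the helper that patches 13 and 14 converge on: it assembles the request body by string concatenation, which is fine for fixed test inputs but produces invalid JSON the moment ``status`` or ``message`` contains a quote or backslash. Where that matters, a builder does the escaping. A sketch using the ``javax.json`` API; the message text is made up for illustration.

.. code-block:: java

    import javax.json.Json;

    public class StatusBodyExample {
        public static void main(String[] args) {
            // Builds {"status":"success","message":"..."} with proper escaping
            // instead of hand-concatenating quote characters.
            String body = Json.createObjectBuilder()
                    .add("status", "success")
                    .add("message", "archived as \"bag-42\"") // hypothetical message with embedded quotes
                    .build()
                    .toString();
            System.out.println(body); // the embedded quotes come out escaped as \"
        }
    }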
From e498417ca733c69fa7466c2977210e967864bfdf Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Tue, 19 Jul 2022 16:09:57 -0400
Subject: [PATCH 15/18] application/json

---
 src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index d30ad8725cb..c791ce72f41 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -2919,7 +2919,7 @@ static Response getDatasetVersionArchivalStatus(Integer datasetId, String versio
     }
     static Response setDatasetVersionArchivalStatus(Integer datasetId, String version, String apiToken, String status, String message) {
         Response response = given()
-                .header(API_TOKEN_HTTP_HEADER, apiToken).contentType("application/ld+json; charset=utf-8").body("{\"status\":\"" + status + "\", \"message\":\"" + message + "\"}")
+                .header(API_TOKEN_HTTP_HEADER, apiToken).contentType("application/json; charset=utf-8").body("{\"status\":\"" + status + "\", \"message\":\"" + message + "\"}")
                 .put("/api/datasets/" + datasetId + "/" + version + "/archivalStatus");
         return response;
     }

From 8a99685fb5f29a03e6542541fdfa139c5bbe3ec9 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 19 Jul 2022 16:47:38 -0400
Subject: [PATCH 16/18] in docs, show verbs for clarity, s/Json/JSON/ #8605

---
 doc/sphinx-guides/source/api/native-api.rst | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index cab9a9bdf06..9f36ad4c4cc 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -1880,9 +1880,9 @@ Get the Archival Status of a Dataset By Version

 Archiving is an optional feature that may be configured for a Dataverse instance. When that is enabled, this API call can be used to retrieve the status. Note that this requires "superuser" credentials.

-/api/datasets/$dataset-id/$version/archivalStatus returns the archival status of the specified dataset version.
+GET /api/datasets/$dataset-id/$version/archivalStatus returns the archival status of the specified dataset version.

-The response is a Json object that will contain a "status" which may be "success", "pending", or "failure" and a "message" which is archive system specific. For "success" the message should provide an identifier or link to the archival copy. For example:
+The response is a JSON object that will contain a "status" which may be "success", "pending", or "failure" and a "message" which is archive system specific. For "success" the message should provide an identifier or link to the archival copy. For example:

 .. code-block:: bash

@@ -1898,9 +1898,9 @@ Set the Archival Status of a Dataset By Version

 Archiving is an optional feature that may be configured for a Dataverse instance. When that is enabled, this API call can be used to set the status. Note that this is intended to be used by the archival system and requires "superuser" credentials.

-/api/datasets/$dataset-id/$version/archivalStatus sets the archival status of the specified dataset version.
+PUT /api/datasets/$dataset-id/$version/archivalStatus sets the archival status of the specified dataset version.

-The body is a Json object that must contain a "status" which may be "success", "pending", or "failure" and a "message" which is archive system specific. For "success" the message should provide an identifier or link to the archival copy. For example:
+The body is a JSON object that must contain a "status" which may be "success", "pending", or "failure" and a "message" which is archive system specific. For "success" the message should provide an identifier or link to the archival copy. For example:

 .. code-block:: bash

@@ -1917,7 +1917,7 @@ Delete the Archival Status of a Dataset By Version

 Archiving is an optional feature that may be configured for a Dataverse instance. When that is enabled, this API call can be used to delete the status. Note that this is intended to be used by the archival system and requires "superuser" credentials.

-/api/datasets/$dataset-id/$version/archivalStatus deletes the archival status of the specified dataset version.
+DELETE /api/datasets/$dataset-id/$version/archivalStatus deletes the archival status of the specified dataset version.

 .. code-block:: bash
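Taken together, patches 13 through 15 pin down the wire format the documentation above describes: a PUT of ``{"status": ..., "message": ...}`` as ``application/json`` to ``/api/datasets/$id/$version/archivalStatus``, and a GET from the same path. A rough end-to-end sketch follows; the instance URL, token, and dataset coordinates are placeholders, ``X-Dataverse-key`` is the native API's token header, and the ``data.`` prefixes assume the response carries the usual ``{"status": "OK", "data": {...}}`` envelope seen with the timestamps call earlier.

.. code-block:: java

    import com.jayway.restassured.RestAssured;
    import com.jayway.restassured.response.Response;

    public class ArchivalStatusRoundTrip {
        public static void main(String[] args) {
            RestAssured.baseURI = "https://demo.dataverse.org"; // placeholder instance
            String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"; // placeholder superuser token

            // Record a pending status for version 1.0 of (hypothetical) dataset 42.
            Response put = RestAssured.given()
                    .header("X-Dataverse-key", apiToken)
                    .contentType("application/json; charset=utf-8")
                    .body("{\"status\":\"pending\", \"message\":\"bag transfer queued\"}")
                    .put("/api/datasets/42/1.0/archivalStatus");
            System.out.println(put.statusCode());

            // Read it back; the payload sits under "data" if the response is enveloped.
            Response get = RestAssured.given()
                    .header("X-Dataverse-key", apiToken)
                    .get("/api/datasets/42/1.0/archivalStatus");
            System.out.println(get.body().jsonPath().getString("data.status"));
            System.out.println(get.body().jsonPath().getString("data.message"));
        }
    }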
From 7362e1c82086bed520007d2bc13eb59695e0115f Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 19 Jul 2022 16:49:39 -0400
Subject: [PATCH 17/18] lower logging #8605

---
 src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index fdfed163369..04323f5cef8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -3345,9 +3345,9 @@ public Response setDatasetVersionArchivalStatus(@PathParam("id") String datasetI
                     dsv.setArchivalCopyLocation(JsonUtil.prettyPrint(update));
                     dsv = datasetversionService.merge(dsv);
-                    logger.info("location now: " + dsv.getArchivalCopyLocation());
-                    logger.info("status now: " + dsv.getArchivalCopyLocationStatus());
-                    logger.info("message now: " + dsv.getArchivalCopyLocationMessage());
+                    logger.fine("location now: " + dsv.getArchivalCopyLocation());
+                    logger.fine("status now: " + dsv.getArchivalCopyLocationStatus());
+                    logger.fine("message now: " + dsv.getArchivalCopyLocationMessage());

                     return ok("Status updated");
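Patch 17 repeats the cleanup from patch 10: per-request diagnostics drop from ``info`` to ``fine``, which sits below java.util.logging's default ``INFO`` threshold, so they disappear from routine server logs but remain available when a logger is turned up for debugging. A self-contained illustration of that threshold behavior:

.. code-block:: java

    import java.util.logging.ConsoleHandler;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class LogLevelExample {
        public static void main(String[] args) {
            Logger logger = Logger.getLogger(LogLevelExample.class.getName());
            logger.info("printed: INFO is at the default threshold");
            logger.fine("dropped: FINE is below the default threshold");

            // Both the logger and a handler must allow FINE for it to appear.
            ConsoleHandler handler = new ConsoleHandler();
            handler.setLevel(Level.FINE);
            logger.addHandler(handler);
            logger.setLevel(Level.FINE);
            logger.fine("printed now that logger and handler are both at FINE");
        }
    }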
From 7410c5b5b48b1ddb62a6ed89a37d6da7d16fe0ac Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Thu, 21 Jul 2022 16:12:30 -0400
Subject: [PATCH 18/18] format urls in docs

---
 doc/sphinx-guides/source/api/native-api.rst | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 9f36ad4c4cc..4af6ea6948f 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -1861,7 +1861,7 @@ The API call requires a Json body that includes the embargo's end date (dateAvai
 Remove an Embargo on Files in a Dataset
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-/api/datasets/$dataset-id/files/actions/:unset-embargo can be used to remove an embargo on one or more files in a dataset. Embargoes can be removed from files that are only in a draft dataset version (and are not in any previously published version) by anyone who can edit the dataset. The same API call can be used by a superuser to remove embargos from files that have already been released as part of a previously published dataset version.
+``/api/datasets/$dataset-id/files/actions/:unset-embargo`` can be used to remove an embargo on one or more files in a dataset. Embargoes can be removed from files that are only in a draft dataset version (and are not in any previously published version) by anyone who can edit the dataset. The same API call can be used by a superuser to remove embargos from files that have already been released as part of a previously published dataset version.

 The API call requires a Json body that includes the list of the fileIds that the embargo should be removed from. All files listed must be in the specified dataset. For example:

@@ -1880,7 +1880,7 @@ Get the Archival Status of a Dataset By Version

 Archiving is an optional feature that may be configured for a Dataverse instance. When that is enabled, this API call can be used to retrieve the status. Note that this requires "superuser" credentials.

-GET /api/datasets/$dataset-id/$version/archivalStatus returns the archival status of the specified dataset version.
+``GET /api/datasets/$dataset-id/$version/archivalStatus`` returns the archival status of the specified dataset version.

 The response is a JSON object that will contain a "status" which may be "success", "pending", or "failure" and a "message" which is archive system specific. For "success" the message should provide an identifier or link to the archival copy. For example:

@@ -1898,7 +1898,7 @@ Set the Archival Status of a Dataset By Version

 Archiving is an optional feature that may be configured for a Dataverse instance. When that is enabled, this API call can be used to set the status. Note that this is intended to be used by the archival system and requires "superuser" credentials.

-PUT /api/datasets/$dataset-id/$version/archivalStatus sets the archival status of the specified dataset version.
+``PUT /api/datasets/$dataset-id/$version/archivalStatus`` sets the archival status of the specified dataset version.

 The body is a JSON object that must contain a "status" which may be "success", "pending", or "failure" and a "message" which is archive system specific. For "success" the message should provide an identifier or link to the archival copy. For example:

@@ -1917,7 +1917,7 @@ Delete the Archival Status of a Dataset By Version

 Archiving is an optional feature that may be configured for a Dataverse instance. When that is enabled, this API call can be used to delete the status. Note that this is intended to be used by the archival system and requires "superuser" credentials.

-DELETE /api/datasets/$dataset-id/$version/archivalStatus deletes the archival status of the specified dataset version.
+``DELETE /api/datasets/$dataset-id/$version/archivalStatus`` deletes the archival status of the specified dataset version.

 .. code-block:: bash