From 9d846d2455e820cc9312863079086c66b0799c7a Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Tue, 26 Sep 2023 09:13:13 +0200 Subject: [PATCH 01/13] fix: require ManageDatasetPermissions for listing role assignments on datasets --- .../engine/command/impl/ListRoleAssignments.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRoleAssignments.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRoleAssignments.java index 1858ba377ab..b619d32cc7e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRoleAssignments.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRoleAssignments.java @@ -6,16 +6,18 @@ import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import java.util.ArrayList; import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Collections; /** * * @author michael */ -@RequiredPermissions( Permission.ManageDataversePermissions ) +// no annotations here, since permissions are dynamically decided public class ListRoleAssignments extends AbstractCommand> { private final DvObject definitionPoint; @@ -34,5 +36,12 @@ public List execute(CommandContext ctxt) throws CommandException } return ctxt.permissions().assignmentsOn(definitionPoint); } + + @Override + public Map> getRequiredPermissions() { + return Collections.singletonMap("", + definitionPoint.isInstanceofDataset() ? Collections.singleton(Permission.ManageDatasetPermissions) + : Collections.singleton(Permission.ManageDataversePermissions)); + } } From 41e363e343861f6b416e6add60e60778f697cce0 Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Tue, 26 Sep 2023 09:13:36 +0200 Subject: [PATCH 02/13] test: require ManageDatasetPermissions for listing role assignments on datasets --- scripts/api/data/role-contributor-plus.json | 12 +++ .../harvard/iq/dataverse/api/DatasetsIT.java | 87 +++++++++++++++++++ 2 files changed, 99 insertions(+) create mode 100644 scripts/api/data/role-contributor-plus.json diff --git a/scripts/api/data/role-contributor-plus.json b/scripts/api/data/role-contributor-plus.json new file mode 100644 index 00000000000..ef9ba3aaff6 --- /dev/null +++ b/scripts/api/data/role-contributor-plus.json @@ -0,0 +1,12 @@ +{ + "alias":"contributorPlus", + "name":"ContributorPlus", + "description":"For datasets, a person who can edit License + Terms, then submit them for review, and add collaborators.", + "permissions":[ + "ViewUnpublishedDataset", + "EditDataset", + "DownloadFile", + "DeleteDatasetDraft", + "ManageDatasetPermissions" + ] +} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 3b6d4d1ecdf..b51d400d2d4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1296,6 +1296,93 @@ public void testAddRoles(){ } + @Test + public void testListRoleAssignments() { + Response createAdminUser = UtilIT.createRandomUser(); + String adminUsername = UtilIT.getUsernameFromResponse(createAdminUser); + String adminApiToken = UtilIT.getApiTokenFromResponse(createAdminUser); + 
UtilIT.makeSuperUser(adminUsername); + + Response createDataverseResponse = UtilIT.createRandomDataverse(adminApiToken); + createDataverseResponse.prettyPrint(); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + // Now, let's allow anyone with a Dataverse account (any "random user") + // to create datasets in this dataverse: + + Response grantRole = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.DS_CONTRIBUTOR, AuthenticatedUsers.get().getIdentifier(), adminApiToken); + grantRole.prettyPrint(); + assertEquals(OK.getStatusCode(), grantRole.getStatusCode()); + + Response createContributorUser = UtilIT.createRandomUser(); + String contributorUsername = UtilIT.getUsernameFromResponse(createContributorUser); + String contributorApiToken = UtilIT.getApiTokenFromResponse(createContributorUser); + + // First, we test listing role assignments on a dataverse which requires "ManageDataversePermissions" + + Response notPermittedToListRoleAssignmentOnDataverse = UtilIT.getRoleAssignmentsOnDataverse(dataverseAlias, contributorApiToken); + assertEquals(UNAUTHORIZED.getStatusCode(), notPermittedToListRoleAssignmentOnDataverse.getStatusCode()); + + Response roleAssignmentsOnDataverse = UtilIT.getRoleAssignmentsOnDataverse(dataverseAlias, adminApiToken); + roleAssignmentsOnDataverse.prettyPrint(); + assertEquals(OK.getStatusCode(), roleAssignmentsOnDataverse.getStatusCode()); + + // Second, we test listing role assignments on a dataset which requires "ManageDatasetPermissions" + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, contributorApiToken); + createDatasetResponse.prettyPrint(); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + logger.info("dataset id: " + datasetId); + + Response datasetAsJson = UtilIT.nativeGet(datasetId, adminApiToken); + datasetAsJson.then().assertThat() + .statusCode(OK.getStatusCode()); + + String identifier = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.identifier"); + assertEquals(10, identifier.length()); + + String protocol1 = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.protocol"); + String authority1 = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.authority"); + String identifier1 = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.identifier"); + String datasetPersistentId = protocol1 + ":" + authority1 + "/" + identifier1; + + Response notPermittedToListRoleAssignmentOnDataset = UtilIT.getRoleAssignmentsOnDataset(datasetId.toString(), null, contributorApiToken); + assertEquals(UNAUTHORIZED.getStatusCode(), notPermittedToListRoleAssignmentOnDataset.getStatusCode()); + + // We create a new role that includes "ManageDatasetPermissions" which are required for listing role assignments + // of a dataset and assign it to the contributor user + + String pathToJsonFile = "scripts/api/data/role-contributor-plus.json"; + Response addDataverseRoleResponse = UtilIT.addDataverseRole(pathToJsonFile, dataverseAlias, adminApiToken); + addDataverseRoleResponse.prettyPrint(); + String body = addDataverseRoleResponse.getBody().asString(); + String status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + + Response giveRandoPermission = UtilIT.grantRoleOnDataset(datasetPersistentId, "contributorPlus", "@" + contributorUsername, adminApiToken); + giveRandoPermission.prettyPrint(); + assertEquals(200, giveRandoPermission.getStatusCode()); + + // Contributor 
user should now be able to list dataset role assignments as well + + Response roleAssignmentsOnDataset = UtilIT.getRoleAssignmentsOnDataset(datasetId.toString(), null, contributorApiToken); + roleAssignmentsOnDataset.prettyPrint(); + assertEquals(OK.getStatusCode(), roleAssignmentsOnDataset.getStatusCode()); + + // ...but not dataverse role assignments + + notPermittedToListRoleAssignmentOnDataverse = UtilIT.getRoleAssignmentsOnDataverse(dataverseAlias, contributorApiToken); + assertEquals(UNAUTHORIZED.getStatusCode(), notPermittedToListRoleAssignmentOnDataverse.getStatusCode()); + + // Finally, we clean up and delete the role we created + + Response deleteDataverseRoleResponse = UtilIT.deleteDataverseRole("contributorPlus", adminApiToken); + deleteDataverseRoleResponse.prettyPrint(); + body = deleteDataverseRoleResponse.getBody().asString(); + status = JsonPath.from(body).getString("status"); + assertEquals("OK", status); + } + @Test public void testFileChecksum() { From 0cb547bc8457cea05e44c6e56c9321925ff688bc Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 21 Dec 2023 17:01:02 -0500 Subject: [PATCH 03/13] Create dirs for dataset as needed --- .../dataverse/globus/GlobusServiceBean.java | 108 ++++++++++++++---- 1 file changed, 88 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index d0660a55a6a..61884045f35 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -134,7 +134,7 @@ private String getRuleId(GlobusEndpoint endpoint, String principal, String permi * @param globusLogger - a separate logger instance, may be null */ public void deletePermission(String ruleId, Dataset dataset, Logger globusLogger) { - globusLogger.info("Start deleting rule " + ruleId + " for dataset " + dataset.getId()); + globusLogger.fine("Start deleting rule " + ruleId + " for dataset " + dataset.getId()); if (ruleId.length() > 0) { if (dataset != null) { GlobusEndpoint endpoint = getGlobusEndpoint(dataset); @@ -179,25 +179,95 @@ public JsonObject requestAccessiblePaths(String principal, Dataset dataset, int permissions.setPrincipal(principal); permissions.setPath(endpoint.getBasePath() + "/"); permissions.setPermissions("rw"); - + JsonObjectBuilder response = Json.createObjectBuilder(); - response.add("status", requestPermission(endpoint, dataset, permissions)); - String driverId = dataset.getEffectiveStorageDriverId(); - JsonObjectBuilder paths = Json.createObjectBuilder(); - for (int i = 0; i < numberOfPaths; i++) { - String storageIdentifier = DataAccess.getNewStorageIdentifier(driverId); - int lastIndex = Math.max(storageIdentifier.lastIndexOf("/"), storageIdentifier.lastIndexOf(":")); - paths.add(storageIdentifier, endpoint.getBasePath() + "/" + storageIdentifier.substring(lastIndex + 1)); + //Try to create the directory (202 status) if it does not exist (502-already exists) + int mkDirStatus = makeDirs(endpoint, dataset); + if (!(mkDirStatus== 202 || mkDirStatus == 502)) { + return response.add("status", mkDirStatus).build(); + } + //The dir for the dataset's data exists, so try to request permission for the principal + int requestPermStatus = requestPermission(endpoint, dataset, permissions); + response.add("status", requestPermStatus); + if (requestPermStatus == 201) { + String driverId = dataset.getEffectiveStorageDriverId(); + JsonObjectBuilder paths = 
Json.createObjectBuilder(); + for (int i = 0; i < numberOfPaths; i++) { + String storageIdentifier = DataAccess.getNewStorageIdentifier(driverId); + int lastIndex = Math.max(storageIdentifier.lastIndexOf("/"), storageIdentifier.lastIndexOf(":")); + paths.add(storageIdentifier, endpoint.getBasePath() + "/" + storageIdentifier.substring(lastIndex + 1)); + } + response.add("paths", paths.build()); } - response.add("paths", paths.build()); return response.build(); } + /** + * Call to create the directories for the specified dataset. + * + * @param dataset + * @return - an error status at whichever subdir the process fails at or the + * final success status + */ + private int makeDirs(GlobusEndpoint endpoint, Dataset dataset) { + logger.fine("Creating dirs: " + endpoint.getBasePath()); + int index = endpoint.getBasePath().lastIndexOf(dataset.getAuthorityForFileStorage()) + + dataset.getAuthorityForFileStorage().length(); + String nextDir = endpoint.getBasePath().substring(0, index); + int response = makeDir(endpoint, nextDir); + String identifier = dataset.getIdentifierForFileStorage(); + //Usually identifiers will have 0 or 1 slashes (e.g. FK2/ABCDEF) but the while loop will handle any that could have more + //Will skip if the first makeDir above failed + while ((identifier.length() > 0) && ((response == 202 || response == 502))) { + index = identifier.indexOf('/'); + if (index == -1) { + //Last dir to create + response = makeDir(endpoint, nextDir + "/" + identifier); + identifier = ""; + } else { + //The next dir to create + nextDir = nextDir + "/" + identifier.substring(0, index); + response = makeDir(endpoint, nextDir); + //The rest of the identifier + identifier = identifier.substring(index + 1); + } + } + return response; + } + + private int makeDir(GlobusEndpoint endpoint, String dir) { + MakeRequestResponse result = null; + String body = "{\"DATA_TYPE\":\"mkdir\",\"path\":\"" + dir + "\"}"; + try { + logger.info(body); + URL url = new URL( + "https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint.getId() + "/mkdir"); + result = makeRequest(url, "Bearer", endpoint.getClientToken(), "POST", body); + + switch (result.status) { + case 202: + logger.fine("Dir " + dir + " was created successfully."); + break; + case 502: + logger.fine("Dir " + dir + " already exists."); + break; + default: + logger.warning("Status " + result.status + " received when creating dir " + dir); + logger.fine("Response: " + result.jsonResponse); + } + } catch (MalformedURLException ex) { + // Misconfiguration + logger.warning("Failed to create dir on " + endpoint.getId()); + return 500; + } + return result.status; + } + private int requestPermission(GlobusEndpoint endpoint, Dataset dataset, Permissions permissions) { Gson gson = new GsonBuilder().create(); MakeRequestResponse result = null; - logger.info("Start creating the rule"); + logger.fine("Start creating the rule"); try { URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + endpoint.getId() + "/access"); @@ -218,7 +288,7 @@ private int requestPermission(GlobusEndpoint endpoint, Dataset dataset, Permissi if (globusResponse != null && globusResponse.containsKey("access_id")) { permissions.setId(globusResponse.getString("access_id")); monitorTemporaryPermissions(permissions.getId(), dataset.getId()); - logger.info("Access rule " + permissions.getId() + " was created successfully"); + logger.fine("Access rule " + permissions.getId() + " was created successfully"); } else { // Shouldn't happen! 
logger.warning("Access rule id not returned for dataset " + dataset.getId()); @@ -363,7 +433,6 @@ private static MakeRequestResponse makeRequest(URL url, String authType, String try { connection = (HttpURLConnection) url.openConnection(); // Basic - logger.info(authType + " " + authCode); logger.fine("For URL: " + url.toString()); connection.setRequestProperty("Authorization", authType + " " + authCode); // connection.setRequestProperty("Content-Type", @@ -713,7 +782,7 @@ public void globusUpload(JsonObject jsonData, ApiToken token, Dataset dataset, S .mapToObj(index -> ((JsonObject) newfilesJsonArray.get(index)).getJsonObject(fileId)) .filter(Objects::nonNull).collect(Collectors.toList()); if (newfileJsonObject != null) { - logger.info("List Size: " + newfileJsonObject.size()); + logger.fine("List Size: " + newfileJsonObject.size()); // if (!newfileJsonObject.get(0).getString("hash").equalsIgnoreCase("null")) { JsonPatch path = Json.createPatchBuilder() .add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build(); @@ -884,7 +953,7 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro String taskIdentifier = jsonObject.getString("taskIdentifier"); GlobusEndpoint endpoint = getGlobusEndpoint(dataset); - logger.info("Endpoint path: " + endpoint.getBasePath()); + logger.fine("Endpoint path: " + endpoint.getBasePath()); // If the rules_cache times out, the permission will be deleted. Presumably that // doesn't affect a @@ -892,10 +961,10 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro GlobusTask task = getTask(endpoint.getClientToken(), taskIdentifier, globusLogger); String ruleId = getRuleId(endpoint, task.getOwner_id(), "r"); if (ruleId != null) { - logger.info("Found rule: " + ruleId); + logger.fine("Found rule: " + ruleId); Long datasetId = rulesCache.getIfPresent(ruleId); if (datasetId != null) { - logger.info("Deleting from cache: rule: " + ruleId); + logger.fine("Deleting from cache: rule: " + ruleId); // Will not delete rule rulesCache.invalidate(ruleId); } @@ -909,7 +978,7 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro // Transfer is done (success or failure) so delete the rule if (ruleId != null) { - logger.info("Deleting: rule: " + ruleId); + logger.fine("Deleting: rule: " + ruleId); deletePermission(ruleId, dataset, globusLogger); } @@ -1032,7 +1101,6 @@ public JsonObject calculateMissingMetadataFields(List inputList, Logger } private CompletableFuture calculateDetailsAsync(String id, Logger globusLogger) { - // logger.info(" calcualte additional details for these globus id ==== " + id); return CompletableFuture.supplyAsync(() -> { try { @@ -1071,7 +1139,7 @@ private FileDetailsHolder calculateDetails(String id, Logger globusLogger) count = 3; } catch (IOException ioex) { count = 3; - logger.info(ioex.getMessage()); + logger.fine(ioex.getMessage()); globusLogger.info( "DataFile (fullPath " + fullPath + ") does not appear to be accessible within Dataverse: "); } catch (Exception ex) { From 826d4bdcd2d0418c8d65c8409107de0d66f6dd19 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 5 Jan 2024 17:46:26 -0500 Subject: [PATCH 04/13] per QA --- doc/sphinx-guides/source/developers/globus-api.rst | 1 + .../java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index de9df06a798..2f922fb1fc0 100644 --- 
a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -2,6 +2,7 @@ Globus Transfer API =================== The Globus API addresses three use cases: + * Transfer to a Dataverse-managed Globus endpoint (File-based or using the Globus S3 Connector) * Reference of files that will remain in a remote Globus endpoint * Transfer from a Dataverse-managed Globus endpoint diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index 61884045f35..3e60441850b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -240,7 +240,7 @@ private int makeDir(GlobusEndpoint endpoint, String dir) { MakeRequestResponse result = null; String body = "{\"DATA_TYPE\":\"mkdir\",\"path\":\"" + dir + "\"}"; try { - logger.info(body); + logger.fine(body); URL url = new URL( "https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint.getId() + "/mkdir"); result = makeRequest(url, "Bearer", endpoint.getClientToken(), "POST", body); From 94570f0c670e6d39594c5cfb9ca5233962834de0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 10 Jan 2024 10:59:21 -0500 Subject: [PATCH 05/13] add toc to docs #10200 --- doc/sphinx-guides/source/developers/globus-api.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index 2f922fb1fc0..b5d420467aa 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -1,6 +1,9 @@ Globus Transfer API =================== +.. contents:: |toctitle| + :local: + The Globus API addresses three use cases: * Transfer to a Dataverse-managed Globus endpoint (File-based or using the Globus S3 Connector) From b1bb6a047cc347a6d6c97ba9f56060d3805ec545 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 11 Jan 2024 11:35:34 -0500 Subject: [PATCH 06/13] minor doc tweaks #10200 --- doc/sphinx-guides/source/developers/globus-api.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index b5d420467aa..96475f33230 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -72,7 +72,7 @@ The response includes the id for the Globus endpoint to use along with several s The getDatasetMetadata and getFileListing URLs are just signed versions of the standard Dataset metadata and file listing API calls. The other two are Globus specific. -If called for a dataset using a store that is configured with a remote Globus endpoint(s), the return response is similar but the response includes a +If called for, a dataset using a store that is configured with a remote Globus endpoint(s), the return response is similar but the response includes a the "managed" parameter will be false, the "endpoint" parameter is replaced with a JSON array of "referenceEndpointsWithPaths" and the requestGlobusTransferPaths and addGlobusFiles URLs are replaced with ones for requestGlobusReferencePaths and addFiles. All of these calls are described further below. 
@@ -91,7 +91,7 @@ The returned response includes the same getDatasetMetadata and getFileListing UR Performing an Upload/Transfer In -------------------------------- -The information from the API call above can be used to provide a user with information about the dataset and to prepare to transfer or to reference files (based on the "managed" parameter). +The information from the API call above can be used to provide a user with information about the dataset and to prepare to transfer (managed=true) or to reference files (managed=false). Once the user identifies which files are to be added, the requestGlobusTransferPaths or requestGlobusReferencePaths URLs can be called. These both reference the same API call but must be used with different entries in the JSON body sent: From 1c3162f01cb921b21a72042ea03b1e9ca94c6da9 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 11 Jan 2024 11:49:01 -0500 Subject: [PATCH 07/13] typo #10200 --- doc/sphinx-guides/source/developers/globus-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index 96475f33230..57748d0afc9 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -170,7 +170,7 @@ In the managed case, once a Globus transfer has been initiated a final API call curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:multipart/form-data" -X POST "$SERVER_URL/api/datasets/:persistentId/addGlobusFiles -F "jsonData=$JSON_DATA" -Note that the mimetype is multipart/form-data, matching the /addFiles API call. ALso note that the API_TOKEN is not needed when using a signed URL. +Note that the mimetype is multipart/form-data, matching the /addFiles API call. Also note that the API_TOKEN is not needed when using a signed URL. With this information, Dataverse will begin to monitor the transfer and when it completes, will add all files for which the transfer succeeded. As the transfer can take significant time and the API call is asynchronous, the only way to determine if the transfer succeeded via API is to use the standard calls to check the dataset lock state and contents. From 8cc2e7c0e5ba16b2f380f8fd31531e1f90271c12 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 11 Jan 2024 11:56:50 -0500 Subject: [PATCH 08/13] fix path in globus endpoint docs #10200 --- doc/sphinx-guides/source/developers/globus-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index 57748d0afc9..a9cfe5aedff 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -102,7 +102,7 @@ Once the user identifies which files are to be added, the requestGlobusTransferP export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV export LOCALE=en-US - curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/datasets/:persistentId/requestGlobusUpload" + curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/datasets/:persistentId/requestGlobusUploadPaths" Note that when using the dataverse-globus app or the return from the previous call, the URL for this call will be signed and no API_TOKEN is needed. 
From c3556e012a03b1e131146821faabb183b1a62a87 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 11 Jan 2024 12:14:24 -0500 Subject: [PATCH 09/13] add missing trailing double quote #10200 --- doc/sphinx-guides/source/developers/globus-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index a9cfe5aedff..5a90243bd93 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -168,7 +168,7 @@ In the managed case, once a Globus transfer has been initiated a final API call "files": [{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b3972213f-f6b5c2221423", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "1234"}}, \ {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b39722140-50eb7d3c5ece", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "2345"}}]}' - curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:multipart/form-data" -X POST "$SERVER_URL/api/datasets/:persistentId/addGlobusFiles -F "jsonData=$JSON_DATA" + curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:multipart/form-data" -X POST "$SERVER_URL/api/datasets/:persistentId/addGlobusFiles" -F "jsonData=$JSON_DATA" Note that the mimetype is multipart/form-data, matching the /addFiles API call. Also note that the API_TOKEN is not needed when using a signed URL. From 3a81926980edc7c8228dddf18a8f1305b32fc2c8 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 11 Jan 2024 15:40:14 -0500 Subject: [PATCH 10/13] add requestGlobusUploadPaths to UtilIT #10200 --- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index e29677c2252..33dda05b4d7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3718,4 +3718,12 @@ static Response requestGlobusDownload(Integer datasetId, JsonObject body, String .post("/api/datasets/" + datasetId + "/requestGlobusDownload"); } + static Response requestGlobusUploadPaths(Integer datasetId, JsonObject body, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(body.toString()) + .contentType("application/json") + .post("/api/datasets/" + datasetId + "/requestGlobusUploadPaths"); + } + } From 83120012480ce12ef8db3d33d3a1c93c4605945a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 11 Jan 2024 15:47:17 -0500 Subject: [PATCH 11/13] clarify where taskIdentifier comes from #10200 --- doc/sphinx-guides/source/developers/globus-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst index 5a90243bd93..834db8161f0 100644 --- a/doc/sphinx-guides/source/developers/globus-api.rst +++ b/doc/sphinx-guides/source/developers/globus-api.rst @@ -157,7 +157,7 @@ In the remote/reference case, the map is from the initially supplied endpoint/pa Adding Files to the Dataset --------------------------- -In the managed case, once a Globus transfer has been initiated a final API 
call is made to Dataverse to provide it with the task identifier of the transfer and information about the files being transferred: +In the managed case, you must initiate a Globus transfer and take note of its task identifier. As in the JSON example below, you will pass it as ``taskIdentifier`` along with details about the files you are transferring: .. code-block:: bash From d86ab1587cb5088330c2df6565744769cc859119 Mon Sep 17 00:00:00 2001 From: Vera Clemens Date: Fri, 12 Jan 2024 11:36:30 +0100 Subject: [PATCH 12/13] test: use curator role in testListRoleAssignments --- scripts/api/data/role-contributor-plus.json | 12 ---------- .../harvard/iq/dataverse/api/DatasetsIT.java | 22 ++++--------------- 2 files changed, 4 insertions(+), 30 deletions(-) delete mode 100644 scripts/api/data/role-contributor-plus.json diff --git a/scripts/api/data/role-contributor-plus.json b/scripts/api/data/role-contributor-plus.json deleted file mode 100644 index ef9ba3aaff6..00000000000 --- a/scripts/api/data/role-contributor-plus.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "alias":"contributorPlus", - "name":"ContributorPlus", - "description":"For datasets, a person who can edit License + Terms, then submit them for review, and add collaborators.", - "permissions":[ - "ViewUnpublishedDataset", - "EditDataset", - "DownloadFile", - "DeleteDatasetDraft", - "ManageDatasetPermissions" - ] -} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index b51d400d2d4..787b9b018a9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1349,17 +1349,11 @@ public void testListRoleAssignments() { Response notPermittedToListRoleAssignmentOnDataset = UtilIT.getRoleAssignmentsOnDataset(datasetId.toString(), null, contributorApiToken); assertEquals(UNAUTHORIZED.getStatusCode(), notPermittedToListRoleAssignmentOnDataset.getStatusCode()); - // We create a new role that includes "ManageDatasetPermissions" which are required for listing role assignments - // of a dataset and assign it to the contributor user + // We assign the curator role to the contributor user + // (includes "ManageDatasetPermissions" which are required for listing role assignments of a dataset, but not + // "ManageDataversePermissions") - String pathToJsonFile = "scripts/api/data/role-contributor-plus.json"; - Response addDataverseRoleResponse = UtilIT.addDataverseRole(pathToJsonFile, dataverseAlias, adminApiToken); - addDataverseRoleResponse.prettyPrint(); - String body = addDataverseRoleResponse.getBody().asString(); - String status = JsonPath.from(body).getString("status"); - assertEquals("OK", status); - - Response giveRandoPermission = UtilIT.grantRoleOnDataset(datasetPersistentId, "contributorPlus", "@" + contributorUsername, adminApiToken); + Response giveRandoPermission = UtilIT.grantRoleOnDataset(datasetPersistentId, "curator", "@" + contributorUsername, adminApiToken); giveRandoPermission.prettyPrint(); assertEquals(200, giveRandoPermission.getStatusCode()); @@ -1373,14 +1367,6 @@ public void testListRoleAssignments() { notPermittedToListRoleAssignmentOnDataverse = UtilIT.getRoleAssignmentsOnDataverse(dataverseAlias, contributorApiToken); assertEquals(UNAUTHORIZED.getStatusCode(), notPermittedToListRoleAssignmentOnDataverse.getStatusCode()); - - // Finally, we clean up and delete the role we created - - Response deleteDataverseRoleResponse = UtilIT.deleteDataverseRole("contributorPlus", 
adminApiToken);
-        deleteDataverseRoleResponse.prettyPrint();
-        body = deleteDataverseRoleResponse.getBody().asString();
-        status = JsonPath.from(body).getString("status");
-        assertEquals("OK", status);
     }
 
     @Test
     public void testFileChecksum() {

From ff044632aff9c2b98aea01da934cfbf63476dc40 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 16 Jan 2024 11:32:17 -0500
Subject: [PATCH 13/13] add release note #9926

---
 doc/release-notes/9926-list-role-assignments-permissions.md | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 doc/release-notes/9926-list-role-assignments-permissions.md

diff --git a/doc/release-notes/9926-list-role-assignments-permissions.md b/doc/release-notes/9926-list-role-assignments-permissions.md
new file mode 100644
index 00000000000..43cd83dc5c9
--- /dev/null
+++ b/doc/release-notes/9926-list-role-assignments-permissions.md
@@ -0,0 +1 @@
+Listing collection/dataverse role assignments via API still requires ManageDataversePermissions, but listing dataset role assignments via API now requires only ManageDatasetPermissions.
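
As an illustration of the behavior described in the release note, here is a minimal sketch of the two listing calls involved, using the same endpoints the new test exercises via UtilIT; the server URL, API token, dataset ID, and collection alias are placeholders, not values from this PR:

```bash
# List role assignments on a dataset: with this change, the caller needs only
# ManageDatasetPermissions on that dataset (granted in the test via the curator role).
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$DATASET_ID/assignments"

# List role assignments on a collection (dataverse): this still requires
# ManageDataversePermissions, as before.
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$COLLECTION_ALIAS/assignments"
```

A 401 Unauthorized response on the second call, from a token that carries only dataset-level permissions, matches the assertions in testListRoleAssignments above.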