From 6eae5e4fec9be0435a91921881e0a64fab46dffd Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 15:12:32 -0500 Subject: [PATCH 001/366] implement batch processing of new versions to archive --- .../dataverse/DatasetVersionServiceBean.java | 26 ++++++++- .../edu/harvard/iq/dataverse/api/Admin.java | 57 +++++++++++++++++++ 2 files changed, 82 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index e4eb6aac88e..ea6a05a2c3c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -987,7 +987,7 @@ public List> getBasicDatasetVersionInfo(Dataset dataset) - public HashMap getFileMetadataHistory(DataFile df){ + public HashMap getFileMetadataHistory(DataFile df){ if (df == null){ throw new NullPointerException("DataFile 'df' cannot be null"); @@ -1165,4 +1165,28 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion) return null; } + /** + * Execute a query to return DatasetVersion + * + * @param queryString + * @return + */ + public List getUnarchivedDatasetVersions(){ + + String queryString = "select * from datasetversion where releasetime is not null and archivalcopylocation is null;"; + + try{ + TypedQuery query = em.createQuery(queryString, DatasetVersion.class); + List dsl = query.getResultList(); + return dsl; + + } catch (javax.persistence.NoResultException e) { + logger.log(Level.FINE, "No unarchived DatasetVersions found: {0}", queryString); + return null; + } catch (EJBException e) { + logger.log(Level.WARNING, "EJBException exception: {0}", e.getMessage()); + return null; + } + } // end getUnarchivedDatasetVersions + } // end class diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index b52665a7747..81fe1ecd2a9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1722,6 +1722,63 @@ public void run() { } } + + @GET + @Path("/archiveAllUnarchivedDataVersions") + public Response archiveAllUnarchivedDatasetVersions() { + + try { + AuthenticatedUser au = findAuthenticatedUserOrDie(); + // Note - the user is being set in the session so it becomes part of the + // DataverseRequest and is sent to the back-end command where it is used to get + // the API Token which is then used to retrieve files (e.g. 
via S3 direct + // downloads) to create the Bag + session.setUser(au); + List dsl = datasetversionService.getUnarchivedDatasetVersions(); + if (dsl != null) { + String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName); + AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0)); + + if (cmd != null) { + new Thread(new Runnable() { + public void run() { + int total = dsl.size(); + int successes = 0; + int failures = 0; + for (DatasetVersion dv : dsl) { + try { + AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); + + dv = commandEngine.submit(cmd); + if (dv.getArchivalCopyLocation() != null) { + successes++; + logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: " + + dv.getArchivalCopyLocation()); + } else { + failures++; + logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber()); + } + } catch (CommandException ex) { + logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex); + } + logger.fine(successes + failures + " of " + total + " archive submissions complete"); + } + logger.info("Archiving complete: " + successes + " Successes, " + failures + " Failures. See prior log messages for details."); + } + }).start(); + return ok("Archiving all unarchived published dataset versions using " + cmd.getClass().getCanonicalName() + ". Processing can take significant time for large datasets/ large numbers of dataset versions. View log and/or check archive for results."); + } else { + logger.log(Level.SEVERE, "Could not find Archiver class: " + className); + return error(Status.INTERNAL_SERVER_ERROR, "Could not find Archiver class: " + className); + } + } else { + return error(Status.BAD_REQUEST, "No unarchived published dataset versions found"); + } + } catch (WrappedResponse e1) { + return error(Status.UNAUTHORIZED, "api key required"); + } + } + @DELETE @Path("/clearMetricsCache") public Response clearMetricsCache() { From 8313404e6604daba3ee53d32d9b09e83ebaae9f2 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 15:26:19 -0500 Subject: [PATCH 002/366] add listonly and limit options, count commandEx as failure --- .../edu/harvard/iq/dataverse/api/Admin.java | 24 ++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 81fe1ecd2a9..3c61d2e8919 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -46,6 +46,7 @@ import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; import javax.ws.rs.DELETE; +import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; @@ -1723,9 +1724,16 @@ public void run() { } + /** + * Iteratively archives all unarchived dataset versions + * @param + * listonly - don't archive, just list unarchived versions + * limit - max number to process + * @return + */ @GET @Path("/archiveAllUnarchivedDataVersions") - public Response archiveAllUnarchivedDatasetVersions() { + public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, 
@QueryParam("limit") Integer limit) { try { AuthenticatedUser au = findAuthenticatedUserOrDie(); @@ -1736,6 +1744,16 @@ public Response archiveAllUnarchivedDatasetVersions() { session.setUser(au); List dsl = datasetversionService.getUnarchivedDatasetVersions(); if (dsl != null) { + if (listonly) { + logger.info("Unarchived versions found: "); + int current = 0; + for (DatasetVersion dv : dsl) { + if (limit != null && current > limit) { + break; + } + logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); + } + } String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName); AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0)); @@ -1746,6 +1764,9 @@ public void run() { int successes = 0; int failures = 0; for (DatasetVersion dv : dsl) { + if (limit != null && (successes + failures) > limit) { + break; + } try { AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); @@ -1759,6 +1780,7 @@ public void run() { logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber()); } } catch (CommandException ex) { + failures++; logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex); } logger.fine(successes + failures + " of " + total + " archive submissions complete"); From 70d923ae08b80d6248acc062ec836ed5812fa645 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 15:36:50 -0500 Subject: [PATCH 003/366] send list in response for listonly --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 3c61d2e8919..4fd3f43b127 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1745,14 +1745,17 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool List dsl = datasetversionService.getUnarchivedDatasetVersions(); if (dsl != null) { if (listonly) { + JsonArrayBuilder jab = Json.createArrayBuilder(); logger.info("Unarchived versions found: "); int current = 0; for (DatasetVersion dv : dsl) { if (limit != null && current > limit) { break; } + jab.add(dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); } + return ok(jab); } String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName); AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0)); From 96d3723307c26668e5687f4ba61fb80d0d207a16 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 15:51:02 -0500 Subject: [PATCH 004/366] fix query --- .../edu/harvard/iq/dataverse/DatasetVersionServiceBean.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index ea6a05a2c3c..344f8af3b87 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -1173,10 +1173,10 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion) */ public List getUnarchivedDatasetVersions(){ - String queryString = "select * from datasetversion where releasetime is not null and archivalcopylocation is null;"; + String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releasetime IS NOT NULL and o.archivalcopylocation IS NULL"; try{ - TypedQuery query = em.createQuery(queryString, DatasetVersion.class); + TypedQuery query = em.createQuery(queryString, DatasetVersion.class); List dsl = query.getResultList(); return dsl; From cb9f374e6452cffa5069ef941a0a5f65a8248ca7 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 16:00:54 -0500 Subject: [PATCH 005/366] case sensitive in query --- .../edu/harvard/iq/dataverse/DatasetVersionServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 344f8af3b87..3f46a25c91e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -1173,7 +1173,7 @@ private DatasetVersion getPreviousVersionWithUnf(DatasetVersion datasetVersion) */ public List getUnarchivedDatasetVersions(){ - String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releasetime IS NOT NULL and o.archivalcopylocation IS NULL"; + String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releaseTime IS NOT NULL and o.archivalCopyLocation IS NULL"; try{ TypedQuery query = em.createQuery(queryString, DatasetVersion.class); From 76e23960219f7cdf0cde5bede1cf8fda55fddd9e Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 16:24:13 -0500 Subject: [PATCH 006/366] param to only archive latest version --- .../edu/harvard/iq/dataverse/api/Admin.java | 38 +++++++++++-------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 4fd3f43b127..e06289dfac8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1729,11 +1729,12 @@ public void run() { * @param * listonly - don't archive, just list unarchived versions * limit - max number to process + * lastestonly - only archive the latest versions * @return */ @GET @Path("/archiveAllUnarchivedDataVersions") - public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit) { + public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit, @QueryParam("latestonly") boolean latestonly) { try { AuthenticatedUser au = findAuthenticatedUserOrDie(); @@ -1752,8 +1753,11 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool if (limit != null && current > limit) { break; } - jab.add(dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); - logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); + if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) { + jab.add(dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); + 
logger.info(" " + dv.getDataset().getGlobalId().toString() + ", v" + dv.getFriendlyVersionNumber()); + current++; + } } return ok(jab); } @@ -1770,21 +1774,23 @@ public void run() { if (limit != null && (successes + failures) > limit) { break; } - try { - AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); - - dv = commandEngine.submit(cmd); - if (dv.getArchivalCopyLocation() != null) { - successes++; - logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: " - + dv.getArchivalCopyLocation()); - } else { + if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) { + try { + AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); + + dv = commandEngine.submit(cmd); + if (dv.getArchivalCopyLocation() != null) { + successes++; + logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: " + + dv.getArchivalCopyLocation()); + } else { + failures++; + logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber()); + } + } catch (CommandException ex) { failures++; - logger.severe("Error submitting version due to conflict/error at Archive for " + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber()); + logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex); } - } catch (CommandException ex) { - failures++; - logger.log(Level.SEVERE, "Unexpected Exception calling submit archive command", ex); } logger.fine(successes + failures + " of " + total + " archive submissions complete"); } From 2e8d990ad4b75719c2d8e6b35a0f3d104822f3c3 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Dec 2020 16:41:58 -0500 Subject: [PATCH 007/366] off by one in limit --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index e06289dfac8..9f819ff13a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1750,7 +1750,7 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool logger.info("Unarchived versions found: "); int current = 0; for (DatasetVersion dv : dsl) { - if (limit != null && current > limit) { + if (limit != null && current >= limit) { break; } if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) { @@ -1771,7 +1771,7 @@ public void run() { int successes = 0; int failures = 0; for (DatasetVersion dv : dsl) { - if (limit != null && (successes + failures) > limit) { + if (limit != null && (successes + failures) >= limit) { break; } if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) { From b7968333b5950f44bbf086ebc1d020ee4ca4535f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 23 Dec 2020 11:52:43 -0500 Subject: [PATCH 008/366] documentation --- doc/sphinx-guides/source/installation/config.rst | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 
4a877eabff7..5b9433d7c31 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -866,9 +866,9 @@ For example: ``cp /usr/local/payara5/glassfish/domains/domain1/files/googlecloudkey.json`` -.. _Archiving API Call: +.. _Archiving API Calls: -API Call +API Calls ++++++++ Once this configuration is complete, you, as a user with the *PublishDataset* permission, should be able to use the API call to manually submit a DatasetVersion for processing: @@ -881,6 +881,18 @@ where: ``{version}`` is the friendly version number, e.g. "1.2". +A batch API call is also available that will attempt to archive any currently unarchived dataset versions: + +``curl -H "X-Dataverse-key: " http://localhost:8080/api/admin/archiveAllUnarchivedDataVersions`` + +The call supports three optional query parameters that can be used in combination: + +``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any + +``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions) + +``limit={n}`` default is no limit/process all unarchived versions (subject to other parameters). Defines a maximum number of versions to attempt to archive in response to one invocation of the API call. + The submitDataVersionToArchive API (and the workflow discussed below) attempt to archive the dataset version via an archive specific method. For Chronopolis, a DuraCloud space named for the dataset (it's DOI with ':' and '.' replaced with '-') is created and two files are uploaded to it: a version-specific datacite.xml metadata file and a BagIt bag containing the data and an OAI-ORE map file. (The datacite.xml file, stored outside the Bag as well as inside is intended to aid in discovery while the ORE map file is 'complete', containing all user-entered metadata and is intended as an archival record.) In the Chronopolis case, since the transfer from the DuraCloud front-end to archival storage in Chronopolis can take significant time, it is currently up to the admin/curator to submit a 'snap-shot' of the space within DuraCloud and to monitor its successful transfer. Once transfer is complete the space should be deleted, at which point the Dataverse API call can be used to submit a Bag for other versions of the same Dataset. (The space is reused, so that archival copies of different Dataset versions correspond to different snapshots of the same DuraCloud space.). From 006a4baff870ebd1c11c86caaacaf96511fadd0c Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 8 Jan 2021 12:28:55 -0500 Subject: [PATCH 009/366] Update doc/sphinx-guides/source/installation/config.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 5b9433d7c31..84ec0699d62 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -869,7 +869,7 @@ For example: .. 
_Archiving API Calls: API Calls -++++++++ ++++++++++ Once this configuration is complete, you, as a user with the *PublishDataset* permission, should be able to use the API call to manually submit a DatasetVersion for processing: From bba8ba0a13703410a9196713c6920150291d4643 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 8 Jan 2021 12:29:20 -0500 Subject: [PATCH 010/366] Update doc/sphinx-guides/source/installation/config.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 84ec0699d62..a997f0e353f 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -887,7 +887,7 @@ A batch API call is also available that will attempt to archive any currently un The call supports three optional query parameters that can be used in combination: -``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any +``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any. ``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions) From 011c97a4b73775cf152e0cf06127d8da9e8d2780 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 8 Jan 2021 12:29:46 -0500 Subject: [PATCH 011/366] Update doc/sphinx-guides/source/installation/config.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index a997f0e353f..67ee66af763 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -889,7 +889,7 @@ The call supports three optional query parameters that can be used in combinatio ``listonly={true/false}`` default is false. Using true retrieves the list of unarchived versions but does not attempt to archive any. -``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions) +``latestonly={true/false}`` default is false. Using true only lists/processes the most recently published version of a given dataset (instead of all published versions). ``limit={n}`` default is no limit/process all unarchived versions (subject to other parameters). Defines a maximum number of versions to attempt to archive in response to one invocation of the API call. 
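For illustration only (not part of any patch in this series; the API token value and the limit of 10 are placeholders), the three optional parameters documented above can be combined in a single call, e.g. to submit at most 10 of the most recently published, still-unarchived versions:

``curl -H "X-Dataverse-key: $API_TOKEN" "http://localhost:8080/api/admin/archiveAllUnarchivedDataVersions?latestonly=true&limit=10"``

Adding ``listonly=true`` to the same URL performs a dry run that only reports which versions would be submitted.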
From 1a1c28ccb7a6c0427f349cd8569c516bca43bf68 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 8 Jan 2021 13:10:22 -0500 Subject: [PATCH 012/366] updates per review --- .../dataverse/DatasetVersionServiceBean.java | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 3f46a25c91e..33cc236b902 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -986,8 +986,8 @@ public List> getBasicDatasetVersionInfo(Dataset dataset) } // end getBasicDatasetVersionInfo - - public HashMap getFileMetadataHistory(DataFile df){ + //Not used? + public HashMap getFileMetadataHistory(DataFile df){ if (df == null){ throw new NullPointerException("DataFile 'df' cannot be null"); @@ -1175,18 +1175,18 @@ public List getUnarchivedDatasetVersions(){ String queryString = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.releaseTime IS NOT NULL and o.archivalCopyLocation IS NULL"; - try{ + try { TypedQuery query = em.createQuery(queryString, DatasetVersion.class); List dsl = query.getResultList(); return dsl; - + } catch (javax.persistence.NoResultException e) { logger.log(Level.FINE, "No unarchived DatasetVersions found: {0}", queryString); return null; - } catch (EJBException e) { - logger.log(Level.WARNING, "EJBException exception: {0}", e.getMessage()); - return null; - } + } catch (EJBException e) { + logger.log(Level.WARNING, "EJBException exception: {0}", e.getMessage()); + return null; + } } // end getUnarchivedDatasetVersions - + } // end class From a849d6cb4a037f971075ff6838bbe3023ed4d953 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 30 Nov 2021 09:44:41 -0500 Subject: [PATCH 013/366] #8191 update ui and bundle, etc. 
--- .../edu/harvard/iq/dataverse/Dataset.java | 1 + .../edu/harvard/iq/dataverse/DatasetPage.java | 15 +++++- .../harvard/iq/dataverse/DatasetVersion.java | 28 +++++++++++ .../edu/harvard/iq/dataverse/Template.java | 1 + .../iq/dataverse/TermsOfUseAndAccess.java | 13 ++++++ .../TermsOfUseAndAccessValidator.java | 46 ++++++++++++++++--- src/main/java/propertyFiles/Bundle.properties | 4 +- src/main/webapp/dataset-license-terms.xhtml | 36 ++++++++++----- src/main/webapp/editFilesFragment.xhtml | 45 +++++++++++------- .../webapp/file-edit-popup-fragment.xhtml | 19 ++++++-- 10 files changed, 166 insertions(+), 42 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 60466f96362..f1fe7b2b09d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -319,6 +319,7 @@ private DatasetVersion createNewDatasetVersion(Template template, FileMetadata f TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); terms.setDatasetVersion(dsv); terms.setLicense(TermsOfUseAndAccess.License.CC0); + terms.setFileAccessRequest(true); dsv.setTermsOfUseAndAccess(terms); } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 90ca5ecb027..0367fca8591 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -394,6 +394,18 @@ public void setRsyncScript(String rsyncScript) { public String getRsyncScriptFilename() { return rsyncScriptFilename; } + + private Boolean hasRestrictedFiles = null; + + public Boolean isHasRestrictedFiles(){ + //cache in page to limit processing + if (hasRestrictedFiles != null){ + return hasRestrictedFiles; + } else { + hasRestrictedFiles = workingVersion.isHasRestrictedFile(); + return hasRestrictedFiles; + } + } private String thumbnailString = null; @@ -2054,7 +2066,8 @@ private String init(boolean initFull) { previewTools = externalToolService.findFileToolsByType(ExternalTool.Type.PREVIEW); datasetExploreTools = externalToolService.findDatasetToolsByType(ExternalTool.Type.EXPLORE); rowsPerPage = 10; - + hasRestrictedFiles = workingVersion.isHasRestrictedFile(); + return null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index d53cf20491c..2a235e5fefb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -560,6 +560,13 @@ public boolean isHasNonPackageFile(){ // The presence of any non-package file means that HTTP Upload was used (no mixing allowed) so we just check the first file. 
return !this.fileMetadatas.get(0).getDataFile().getContentType().equals(DataFileServiceBean.MIME_TYPE_PACKAGE_FILE); } + + public boolean isHasRestrictedFile(){ + if (this.fileMetadatas.isEmpty()){; + return false; + } + return this.fileMetadatas.stream().anyMatch(fm -> (fm.isRestricted())); + } public void updateDefaultValuesFromTemplate(Template template) { if (!template.getDatasetFields().isEmpty()) { @@ -636,6 +643,11 @@ public void initDefaultValues() { TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); terms.setDatasetVersion(this); terms.setLicense(TermsOfUseAndAccess.License.CC0); + /* + Added for https://github.com/IQSS/dataverse/issues/8191 + set File Access Request to true + */ + terms.setFileAccessRequest(true); this.setTermsOfUseAndAccess(terms); } @@ -1665,7 +1677,23 @@ public Set validate() { } } } + + + TermsOfUseAndAccess toua = this.termsOfUseAndAccess; + //Only need to test Terms of Use and Access if there are restricted files + if (toua != null && this.isHasRestrictedFile()) { + Set> constraintViolations = validator.validate(toua); + if (constraintViolations.size() > 0) { + ConstraintViolation violation = constraintViolations.iterator().next(); + String message = "Constraint violation found in Terms of Use and Access. " + + "If Request Access to restricted files is disabled then Terms of Access must be provided."; + logger.info(message); + this.termsOfUseAndAccess.setValidationMessage(message); + returnSet.add(violation); + } + } + return returnSet; } diff --git a/src/main/java/edu/harvard/iq/dataverse/Template.java b/src/main/java/edu/harvard/iq/dataverse/Template.java index b01b0a2b792..5b9d7c82fe8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Template.java +++ b/src/main/java/edu/harvard/iq/dataverse/Template.java @@ -326,6 +326,7 @@ public Template cloneNewTemplate(Template source) { } else { terms = new TermsOfUseAndAccess(); terms.setLicense(TermsOfUseAndAccess.defaultLicense); + terms.setFileAccessRequest(true); } newTemplate.setTermsOfUseAndAccess(terms); return newTemplate; diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java index 72f4ab54ee8..04dd48ea473 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java @@ -14,6 +14,7 @@ import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.OneToOne; +import javax.persistence.Transient; /** * @@ -21,6 +22,7 @@ * @author skraffmi */ @Entity +@ValidateTermsOfUseAndAccess public class TermsOfUseAndAccess implements Serializable { @Id @@ -275,6 +277,17 @@ public enum License { NONE, CC0 } + @Transient + private String validationMessage; + + public String getValidationMessage() { + return validationMessage; + } + + public void setValidationMessage(String validationMessage) { + this.validationMessage = validationMessage; + } + /** * @todo What does the GUI use for a default license? What does the "native" * API use? 
See also https://github.com/IQSS/dataverse/issues/1385 diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java index dfa9e9f6c77..394d0f359ac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java @@ -21,12 +21,46 @@ public void initialize(ValidateTermsOfUseAndAccess constraintAnnotation) { @Override public boolean isValid(TermsOfUseAndAccess value, ConstraintValidatorContext context) { - //if both null invalid - //if(value.getTemplate() == null && value.getDatasetVersion() == null) return false; + //must allow access requests or have terms of access filled in. - //if both not null invalid - //return !(value.getTemplate() != null && value.getDatasetVersion() != null); - return true; + boolean valid = value.isFileAccessRequest() == true || (value.getTermsOfAccess() != null && !value.getTermsOfAccess().isEmpty()) ; + if (!valid) { + try { + + + if ( context != null) { + context.buildConstraintViolationWithTemplate( "If Request Access is false then Terms of Access must be provided.").addConstraintViolation(); + } + + String message = "Constraint violation found in Terms of Use and Access. " + + " If Request Access to restricted files is set to false then Terms of Access must be provided."; + + value.setValidationMessage(message); + } catch (NullPointerException e) { + return false; + } + return false; + } + + + return valid; + } + + public static boolean isTOUAValid(TermsOfUseAndAccess value, ConstraintValidatorContext context){ + + boolean valid = value.isFileAccessRequest() == true || (value.getTermsOfAccess() != null && !value.getTermsOfAccess().isEmpty()); + if (!valid) { + + if (context != null) { + context.buildConstraintViolationWithTemplate("If Request Access is false then Terms of Access must be provided.").addConstraintViolation(); + } + + String message = "Constraint violation found in Terms of Use and Access. " + + " If Request Access to restricted files is set to false then Terms of Access must be provided."; + + value.setValidationMessage(message); + } + return valid; } - + } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index fbbda5213ad..621b9116381 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2124,8 +2124,8 @@ citationFrame.banner.countdownMessage.seconds=seconds #file-edit-popup-fragment.xhtml #editFilesFragment.xhtml dataset.access.accessHeader=Restrict Files and Add Dataset Terms of Access -dataset.access.description=Restricting limits access to published files. You can add or edit Terms of Access for the dataset, and allow people to Request Access to restricted files. - +dataset.access.description=Restricting limits access to published files. Providing information about access to restricted files is required. By default people who want to use these files can request access to them. You can provide Terms of Access instead by unchecking the box and adding them. These settings can be changed when you edit the dataset. Learn about restricting files and dataset access in the User Guide. +dataset.access.description.line.2=One of the following methods for communicating access must be active, and applies to all restricted files in this dataset. 
#datasetFieldForEditFragment.xhtml dataset.AddReplication=Add "Replication Data for" to Title dataset.replicationDataFor=Replication Data for: diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index c5bdc8638cf..cf623ec8c8a 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -277,26 +277,38 @@ -
+                                            #{bundle['dataset.access.description.line.2']}
@@ -370,7 +382,7 @@ data-toggle="tooltip" data-placement="auto right" data-original-title="#{bundle['file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace.title']}">
diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml
index e5e12201fc8..e9cd978c690 100644
--- a/src/main/webapp/editFilesFragment.xhtml
+++ b/src/main/webapp/editFilesFragment.xhtml
@@ -593,36 +593,49 @@

-                                            #{bundle['dataset.access.description']}
+                                            #{bundle['dataset.access.description.line.2']}
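As a minimal sketch (illustrative only, not part of the patches above; it assumes a ``TermsOfUseAndAccess`` instance named ``terms`` attached to a published version that contains restricted files), the rule enforced by the ``TermsOfUseAndAccessValidator`` introduced in PATCH 013 reduces to:

    // valid when access requests are enabled OR Terms of Access text is provided
    boolean valid = terms.isFileAccessRequest()
            || (terms.getTermsOfAccess() != null && !terms.getTermsOfAccess().isEmpty());

That is, a version with restricted files must either allow users to request access or supply Terms of Access text before it passes validation.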
+ From 840cd1e911f517cc1ad0c8851fb3d0c700cf25a8 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 2 Feb 2022 12:07:13 -0500 Subject: [PATCH 031/366] Update 'report-datasets' jsonarray and docs for counter-proc. v0.1.04 --- doc/sphinx-guides/source/admin/make-data-count.rst | 6 +++--- .../source/developers/make-data-count.rst | 4 ++-- .../source/installation/prerequisites.rst | 10 +++++----- scripts/vagrant/setup-counter-processor.sh | 2 +- .../makedatacount/DatasetMetricsServiceBean.java | 7 ++++++- 5 files changed, 17 insertions(+), 12 deletions(-) diff --git a/doc/sphinx-guides/source/admin/make-data-count.rst b/doc/sphinx-guides/source/admin/make-data-count.rst index 8d3dbba5127..3f1b04c3c36 100644 --- a/doc/sphinx-guides/source/admin/make-data-count.rst +++ b/doc/sphinx-guides/source/admin/make-data-count.rst @@ -83,9 +83,9 @@ Configure Counter Processor * Change to the directory where you installed Counter Processor. - * ``cd /usr/local/counter-processor-0.0.1`` + * ``cd /usr/local/counter-processor-0.1.04`` -* Download :download:`counter-processor-config.yaml <../_static/admin/counter-processor-config.yaml>` to ``/usr/local/counter-processor-0.0.1``. +* Download :download:`counter-processor-config.yaml <../_static/admin/counter-processor-config.yaml>` to ``/usr/local/counter-processor-0.1.04``. * Edit the config file and pay particular attention to the FIXME lines. @@ -98,7 +98,7 @@ Soon we will be setting up a cron job to run nightly but we start with a single * Change to the directory where you installed Counter Processor. - * ``cd /usr/local/counter-processor-0.0.1`` + * ``cd /usr/local/counter-processor-0.1.04`` * If you are running Counter Processor for the first time in the middle of a month, you will need create blank log files for the previous days. e.g.: diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst index 88ca0007a8b..9c6095136b8 100644 --- a/doc/sphinx-guides/source/developers/make-data-count.rst +++ b/doc/sphinx-guides/source/developers/make-data-count.rst @@ -51,7 +51,7 @@ Once you are done with your configuration, you can run Counter Processor like th ``su - counter`` -``cd /usr/local/counter-processor-0.0.1`` +``cd /usr/local/counter-processor-0.1.04`` ``CONFIG_FILE=counter-processor-config.yaml python36 main.py`` @@ -84,7 +84,7 @@ Second, if you are also sending your SUSHI report to Make Data Count, you will n ``curl -H "Authorization: Bearer $JSON_WEB_TOKEN" -X DELETE https://$MDC_SERVER/reports/$REPORT_ID`` -To get the ``REPORT_ID``, look at the logs generated in ``/usr/local/counter-processor-0.0.1/tmp/datacite_response_body.txt`` +To get the ``REPORT_ID``, look at the logs generated in ``/usr/local/counter-processor-0.1.04/tmp/datacite_response_body.txt`` To read more about the Make Data Count api, see https://github.com/datacite/sashimi diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index d25076aab89..68868f00f5e 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -96,7 +96,7 @@ PostgreSQL ---------- Installing PostgreSQL -======================= +===================== The application has been tested with PostgreSQL versions up to 13 and version 10+ is required. We recommend installing the latest version that is available for your OS distribution. 
*For example*, to install PostgreSQL 13 under RHEL7/derivative:: @@ -425,12 +425,12 @@ Counter Processor has only been tested on el7 (see "Linux" above). Please note t As root, download and install Counter Processor:: cd /usr/local - wget https://github.com/CDLUC3/counter-processor/archive/v0.0.1.tar.gz + wget https://github.com/CDLUC3/counter-processor/archive/v0.1.04.tar.gz tar xvfz v0.0.1.tar.gz As root, change to the Counter Processor directory you just created, download the GeoLite2-Country tarball, untar it, and copy the geoip database into place:: - cd /usr/local/counter-processor-0.0.1 + cd /usr/local/counter-processor-0.1.04 wget https://geolite.maxmind.com/download/geoip/database/GeoLite2-Country.tar.gz tar xvfz GeoLite2-Country.tar.gz cp GeoLite2-Country_*/GeoLite2-Country.mmdb maxmind_geoip @@ -438,7 +438,7 @@ As root, change to the Counter Processor directory you just created, download th As root, create a "counter" user and change ownership of Counter Processor directory to this new user:: useradd counter - chown -R counter:counter /usr/local/counter-processor-0.0.1 + chown -R counter:counter /usr/local/counter-processor-0.1.04 Installing Counter Processor Python Requirements ================================================ @@ -456,7 +456,7 @@ Install Python 3.6:: Install Counter Processor Python requirements:: python3.6 -m ensurepip - cd /usr/local/counter-processor-0.0.1 + cd /usr/local/counter-processor-0.1.04 pip3 install -r requirements.txt See the :doc:`/admin/make-data-count` section of the Admin Guide for how to configure and run Counter Processor. diff --git a/scripts/vagrant/setup-counter-processor.sh b/scripts/vagrant/setup-counter-processor.sh index 29d860208e5..659dc5163f4 100755 --- a/scripts/vagrant/setup-counter-processor.sh +++ b/scripts/vagrant/setup-counter-processor.sh @@ -11,7 +11,7 @@ python3.6 -m ensurepip COUNTER_USER=counter echo "Ensuring Unix user '$COUNTER_USER' exists" useradd $COUNTER_USER || : -COMMIT='a73dbced06f0ac2f0d85231e4d9dd4f21bee8487' +COMMIT='7974dad259465ba196ef639f48dea007cae8f9ac' UNZIPPED_DIR="counter-processor-$COMMIT" if [ ! 
-e $UNZIPPED_DIR ]; then ZIP_FILE="${COMMIT}.zip" diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java index 9c9dc24a17a..39afdf318ad 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java @@ -118,7 +118,12 @@ public List parseSushiReport(JsonObject report){ public List parseSushiReport(JsonObject report, Dataset dataset) { List datasetMetricsAll = new ArrayList<>(); - JsonArray reportDatasets = report.getJsonArray("report_datasets"); + //Current counter-processor v 0.1.04+ format + JsonArray reportDatasets = report.getJsonArray("report-datasets"); + if(reportDatasets==null) { + //Try counter-processor v 0.0.1 name + reportDatasets = report.getJsonArray("report_datasets"); + } for (JsonValue reportDataset : reportDatasets) { List datasetMetricsDataset = new ArrayList<>(); String globalId = null; From e07f60cf053be7fffc7ef46733490807120f202a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 2 Feb 2022 12:28:25 -0500 Subject: [PATCH 032/366] #6629 update GeoLite db docs --- .../source/installation/prerequisites.rst | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 68868f00f5e..2bd51465e24 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -427,14 +427,24 @@ As root, download and install Counter Processor:: cd /usr/local wget https://github.com/CDLUC3/counter-processor/archive/v0.1.04.tar.gz tar xvfz v0.0.1.tar.gz + cd /usr/local/counter-processor-0.1.04 -As root, change to the Counter Processor directory you just created, download the GeoLite2-Country tarball, untar it, and copy the geoip database into place:: +Installing GeoLite Country Database +=================================== - cd /usr/local/counter-processor-0.1.04 - wget https://geolite.maxmind.com/download/geoip/database/GeoLite2-Country.tar.gz +Counter Processor can report per country results if the optional GeoLite Country Database is installed. At present, this database is free but to use it one nust signing an agreement (EULA) with MaxMind. +(The primary concern appears to be that individuals can opt-out of having their location tracked via IP address and, due to various privacy laws, MaxMind needs a way to comply with that for products it has 'sold' (for no cost in this case). Their agreement requires you to either configure automatic updates to the GeoLite Country database or be responsible on your own for managing take down notices.) +The process required to sign up, download the database, and to configure automated updating is described at https://blog.maxmind.com/2019/12/18/significant-changes-to-accessing-and-using-geolite2-databases/ and the links from that page. 
+ +As root, change to the Counter Processor directory you just created, download the GeoLite2-Country tarball from MaxMind, untar it, and copy the geoip database into place:: + + tar xvfz GeoLite2-Country.tar.gz cp GeoLite2-Country_*/GeoLite2-Country.mmdb maxmind_geoip +Creating a counter User +======================= + As root, create a "counter" user and change ownership of Counter Processor directory to this new user:: useradd counter From 73aa1140b1d181cf6bddc8de2847cb59caaa1965 Mon Sep 17 00:00:00 2001 From: "don.sizemore" Date: Fri, 4 Feb 2022 09:13:38 -0500 Subject: [PATCH 033/366] #8295 bump pgjdbc 42.3.1 to 42.3.2 to address CVE-2022-21724 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 074e08ca7ba..93d823145f0 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ 8.0.0 5.2021.5 - 42.3.1 + 42.3.2 1.11.762 1.7.35 1.2.18.4 From 5a075218be1b83b1273f2af441a0829d18322467 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 9 Feb 2022 09:46:29 -0500 Subject: [PATCH 034/366] #8191 remove duplicate popup --- .../edu/harvard/iq/dataverse/DatasetPage.java | 10 +++- src/main/webapp/dataset.xhtml | 20 +++---- src/main/webapp/editFilesFragment.xhtml | 58 +------------------ .../webapp/file-edit-popup-fragment.xhtml | 6 +- 4 files changed, 23 insertions(+), 71 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index cbe64994252..6b608d57dc3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3280,15 +3280,17 @@ public List completeHostDataverseMenuList(String query) { public String restrictFiles(boolean restricted) throws CommandException { List filesToRestrict = new ArrayList(); - if (fileMetadataForAction != null) { filesToRestrict.add(fileMetadataForAction); } else { filesToRestrict = this.getSelectedFiles(); } - restrictFiles(filesToRestrict, restricted); - return save(); + if (editMode == EditMode.CREATE) { + return ""; + } else { + return save(); + } } private void restrictFiles(List filesToRestrict, boolean restricted) throws CommandException { @@ -3622,6 +3624,8 @@ public String save() { if (editMode != null) { if (editMode.equals(EditMode.CREATE)) { + + System.out.print("adding files..."); // We allow users to upload files on Create: int nNewFiles = newFiles.size(); logger.fine("NEW FILES: "+nNewFiles); diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 5f841ad890d..e4721aaf13c 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -927,17 +927,15 @@ - - - - - - - - - - - + + + + + + + + +

#{bundle['dataset.share.datasetShare.tip']}

diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml
index 362ae279187..6558bb47b38 100644
--- a/src/main/webapp/editFilesFragment.xhtml
+++ b/src/main/webapp/editFilesFragment.xhtml
@@ -440,11 +440,12 @@
+                                        update="@([id$=accessPopup])"
+                                        oncomplete="PF('accessPopup').show();bind_bsui_components();">
@@ -608,59 +609,6 @@
-                                        #{bundle['dataset.access.description']}
diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml
index f194a91c914..50fafc66d31 100644
--- a/src/main/webapp/file-edit-popup-fragment.xhtml
+++ b/src/main/webapp/file-edit-popup-fragment.xhtml
@@ -44,7 +44,8 @@
         data-toggle="tooltip" data-placement="auto right"
         data-original-title="#{bundle['file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title']}">
@@ -59,7 +60,7 @@
@@ -72,6 +73,7 @@
    Date: Fri, 11 Feb 2022 17:24:15 -0500 Subject: [PATCH 037/366] #8191 remove out of date comment/code --- .../edu/harvard/iq/dataverse/TermsOfUseAndAccess.java | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java index c4f52d7ffca..a8616283332 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java @@ -300,14 +300,6 @@ public void setValidationMessage(String validationMessage) { this.validationMessage = validationMessage; } - /** - * @todo What does the GUI use for a default license? What does the "native" - * API use? See also https://github.com/IQSS/dataverse/issues/1385 - */ - /* - public static TermsOfUseAndAccess.License defaultLicense = TermsOfUseAndAccess.License.CC0; - public static String CC0_URI = "https://creativecommons.org/publicdomain/zero/1.0/"; - */ @Override public int hashCode() { int hash = 0; From 4764b8e5501e0ba1b4c5bf791e2dfc7533bc52fd Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 15 Feb 2022 15:19:19 -0500 Subject: [PATCH 038/366] read metadatalanauge if sent, set if exists and is allowed not sent - use default not allowed - throw exception to return bad request --- .../edu/harvard/iq/dataverse/util/json/JsonParser.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 4930d0a4e7b..5a49c56acbf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -301,7 +301,13 @@ public Dataset parseDataset(JsonObject obj) throws JsonParseException { dataset.setAuthority(obj.getString("authority", null) == null ? settingsService.getValueForKey(SettingsServiceBean.Key.Authority) : obj.getString("authority")); dataset.setProtocol(obj.getString("protocol", null) == null ? 
settingsService.getValueForKey(SettingsServiceBean.Key.Protocol) : obj.getString("protocol")); dataset.setIdentifier(obj.getString("identifier",null)); - + String mdl = obj.getString("metadatalanguage",null); + if(mdl==null || settingsService.getBaseMetadataLanguageMap(new HashMap(), true).containsKey(mdl)) { + dataset.setMetadataLanguage(mdl); + }else { + throw new JsonParseException("Specified metadatalangauge not allowed."); + } + DatasetVersion dsv = new DatasetVersion(); dsv.setDataset(dataset); dsv = parseDatasetVersion(obj.getJsonObject("datasetVersion"), dsv); From 212c824079b31cb68ef3bbe69dc55d2fee0e6773 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 15 Feb 2022 15:40:27 -0500 Subject: [PATCH 039/366] add metadata language as schema.org/inLanguage for import/export --- .../edu/harvard/iq/dataverse/api/Dataverses.java | 12 +++++++++++- .../edu/harvard/iq/dataverse/util/bagit/OREMap.java | 5 ++++- .../harvard/iq/dataverse/util/json/JSONLDUtil.java | 3 +++ 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 0a4c76a54e3..a9c706996a7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -66,6 +66,7 @@ import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty; import edu.harvard.iq.dataverse.util.json.JSONLDUtil; +import edu.harvard.iq.dataverse.util.json.JsonLDTerm; import edu.harvard.iq.dataverse.util.json.JsonParseException; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; import java.io.StringReader; @@ -110,6 +111,7 @@ import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Date; +import java.util.HashMap; import java.util.Map; import java.util.Optional; import javax.servlet.http.HttpServletResponse; @@ -300,6 +302,11 @@ public Response createDatasetFromJsonLd(String jsonLDBody, @PathParam("identifie ds.setIdentifier(null); ds.setProtocol(null); ds.setGlobalIdCreateTime(null); + + //Verify metadatalanguage is allowed + if(ds.getMetadataLanguage()!= null && !settingsService.getBaseMetadataLanguageMap(new HashMap(), true).containsKey(ds.getMetadataLanguage())) { + throw new BadRequestException("Specified metadatalangauge (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed."); + } Dataset managedDs = execCommand(new CreateNewDatasetCommand(ds, createDataverseRequest(u))); return created("/datasets/" + managedDs.getId(), @@ -479,8 +486,11 @@ public Response recreateDataset(String jsonLDBody, @PathParam("identifier") Stri if(!datasetSvc.isIdentifierLocallyUnique(ds)) { throw new BadRequestException("Cannot recreate a dataset whose PID is already in use"); } - + //Verify metadatalanguage is allowed + if(ds.getMetadataLanguage()!= null && !settingsService.getBaseMetadataLanguageMap(new HashMap(), true).containsKey(ds.getMetadataLanguage())) { + throw new BadRequestException("Specified metadatalangauge (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed."); + } if (ds.getVersions().isEmpty()) { return badRequest("Supplied json must contain a single dataset version."); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index 38a04b36314..ff9de5d8f25 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -214,7 +214,10 @@ 
public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Except aggBuilder.add(JsonLDTerm.schemaOrg("includedInDataCatalog").getLabel(), BrandingUtil.getRootDataverseCollectionName()); - + + addIfNotNull(aggBuilder, JsonLDTerm.schemaOrg("inLanguage"), dataset.getMetadataLanguage()); + + // The aggregation aggregates aggregatedresources (Datafiles) which each have // their own entry and metadata JsonArrayBuilder aggResArrayBuilder = Json.createArrayBuilder(); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java index 3fdacbdc8de..62cd54387b8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java @@ -89,6 +89,9 @@ public static Dataset updateDatasetMDFromJsonLD(Dataset ds, String jsonLDBody, + "'. Make sure it is in valid form - see Dataverse Native API documentation."); } } + + //Store the metadatalanguage if sent - the caller needs to check whether it is allowed (as with any GlobalID) + ds.setMetadataLanguage(jsonld.getString(JsonLDTerm.schemaOrg("inLanguage").getUrl(),null)); dsv = updateDatasetVersionMDFromJsonLD(dsv, jsonld, metadataBlockSvc, datasetFieldSvc, append, migrating, licenseSvc); dsv.setDataset(ds); From a156c528468180cdd86a9828f132d39d6c097f68 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 15 Feb 2022 17:13:53 -0500 Subject: [PATCH 040/366] i18n more fields --- .../dataverse/export/ddi/DdiExportUtil.java | 50 ++++++++++--------- 1 file changed, 26 insertions(+), 24 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 10c02342867..a49e0a73fbf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -437,7 +437,7 @@ private static void writeSummaryDescriptionElement(XMLStreamWriter xmlw, Dataset } } if (DatasetFieldConstant.kindOfData.equals(fieldDTO.getTypeName())) { - writeMultipleElement(xmlw, "dataKind", fieldDTO); + writeMultipleElement(xmlw, "dataKind", fieldDTO, lang); } } } @@ -496,7 +496,7 @@ private static void writeSummaryDescriptionElement(XMLStreamWriter xmlw, Dataset } if (DatasetFieldConstant.southLatitude.equals(next.getTypeName())) { writeFullElement(xmlw, "southBL", next.getSinglePrimitive()); - } + } } xmlw.writeEndElement(); @@ -510,20 +510,21 @@ private static void writeSummaryDescriptionElement(XMLStreamWriter xmlw, Dataset if("socialscience".equals(key)){ for (FieldDTO fieldDTO : value.getFields()) { if (DatasetFieldConstant.universe.equals(fieldDTO.getTypeName())) { - writeMultipleElement(xmlw, "universe", fieldDTO); + writeMultipleElement(xmlw, "universe", fieldDTO, lang); } if (DatasetFieldConstant.unitOfAnalysis.equals(fieldDTO.getTypeName())) { writeI18NElementList(xmlw, "anlyUnit", fieldDTO.getMultipleVocab(), "unitOfAnalysis", fieldDTO.getTypeClass(), "socialscience", lang); } - } + } } } xmlw.writeEndElement(); //sumDscr } - private static void writeMultipleElement(XMLStreamWriter xmlw, String element, FieldDTO fieldDTO) throws XMLStreamException { + private static void writeMultipleElement(XMLStreamWriter xmlw, String element, FieldDTO fieldDTO, String lang) throws XMLStreamException { for (String value : fieldDTO.getMultiplePrimitive()) { - writeFullElement(xmlw, element, value); + //Write multiple lang vals for controlled 
vocab, otherwise don't include any lang tag + writeFullElement(xmlw, element, value, fieldDTO.getTypeClass().equals("controlledVocabulary") ? lang : null); } } @@ -541,37 +542,37 @@ private static void writeDateElement(XMLStreamWriter xmlw, String element, Strin private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO version, String lang) throws XMLStreamException{ xmlw.writeStartElement("method"); xmlw.writeStartElement("dataColl"); - writeI18NElement(xmlw, "timeMeth", version, DatasetFieldConstant.timeMethod,lang); - writeFullElement(xmlw, "dataCollector", dto2Primitive(version, DatasetFieldConstant.dataCollector)); - writeFullElement(xmlw, "collectorTraining", dto2Primitive(version, DatasetFieldConstant.collectorTraining)); - writeFullElement(xmlw, "frequenc", dto2Primitive(version, DatasetFieldConstant.frequencyOfDataCollection)); + writeI18NElement(xmlw, "timeMeth", version, DatasetFieldConstant.timeMethod,lang); + writeI18NElement(xmlw, "dataCollector", version, DatasetFieldConstant.dataCollector, lang); + writeI18NElement(xmlw, "collectorTraining", version, DatasetFieldConstant.collectorTraining, lang); + writeI18NElement(xmlw, "frequenc", version, DatasetFieldConstant.frequencyOfDataCollection, lang); writeI18NElement(xmlw, "sampProc", version, DatasetFieldConstant.samplingProcedure, lang); writeTargetSampleElement(xmlw, version); - writeFullElement(xmlw, "deviat", dto2Primitive(version, DatasetFieldConstant.deviationsFromSampleDesign)); + writeI18nElement(xmlw, "deviat", version, DatasetFieldConstant.deviationsFromSampleDesign, lang); xmlw.writeStartElement("sources"); - writeFullElementList(xmlw, "dataSrc", dto2PrimitiveList(version, DatasetFieldConstant.dataSources)); - writeFullElement(xmlw, "srcOrig", dto2Primitive(version, DatasetFieldConstant.originOfSources)); - writeFullElement(xmlw, "srcChar", dto2Primitive(version, DatasetFieldConstant.characteristicOfSources)); - writeFullElement(xmlw, "srcDocu", dto2Primitive(version, DatasetFieldConstant.accessToSources)); + writeI18NElementList(xmlw, "dataSrc", version, DatasetFieldConstant.dataSources, lang); + writeI18NElement(xmlw, "srcOrig", version, DatasetFieldConstant.originOfSources, lang); + writeI18NElement(xmlw, "srcChar", version, DatasetFieldConstant.characteristicOfSources, lang); + writeI18NElement(xmlw, "srcDocu", version, DatasetFieldConstant.accessToSources, lang); xmlw.writeEndElement(); //sources writeI18NElement(xmlw, "collMode", version, DatasetFieldConstant.collectionMode, lang); writeI18NElement(xmlw, "resInstru", version, DatasetFieldConstant.researchInstrument, lang); - writeFullElement(xmlw, "collSitu", dto2Primitive(version, DatasetFieldConstant.dataCollectionSituation)); - writeFullElement(xmlw, "actMin", dto2Primitive(version, DatasetFieldConstant.actionsToMinimizeLoss)); - writeFullElement(xmlw, "conOps", dto2Primitive(version, DatasetFieldConstant.controlOperations)); - writeFullElement(xmlw, "weight", dto2Primitive(version, DatasetFieldConstant.weighting)); - writeFullElement(xmlw, "cleanOps", dto2Primitive(version, DatasetFieldConstant.cleaningOperations)); + writeI18NElement(xmlw, "collSitu", version, DatasetFieldConstant.dataCollectionSituation, lang); + writeI18NElement(xmlw, "actMin", version, DatasetFieldConstant.actionsToMinimizeLoss, lang); + writeI18NElement(xmlw, "conOps", version, DatasetFieldConstant.controlOperations, lang); + writeI18NElement(xmlw, "weight", version, DatasetFieldConstant.weighting, lang); + writeI18NElement(xmlw, "cleanOps", version, 
DatasetFieldConstant.cleaningOperations, lang); xmlw.writeEndElement(); //dataColl xmlw.writeStartElement("anlyInfo"); //writeFullElement(xmlw, "anylInfo", dto2Primitive(version, DatasetFieldConstant.datasetLevelErrorNotes)); - writeFullElement(xmlw, "respRate", dto2Primitive(version, DatasetFieldConstant.responseRate)); - writeFullElement(xmlw, "EstSmpErr", dto2Primitive(version, DatasetFieldConstant.samplingErrorEstimates)); - writeFullElement(xmlw, "dataAppr", dto2Primitive(version, DatasetFieldConstant.otherDataAppraisal)); + writeI18NElement(xmlw, "respRate", version, DatasetFieldConstant.responseRate, lang); + writeI18NElement(xmlw, "EstSmpErr", version, DatasetFieldConstant.samplingErrorEstimates, lang); + writeI18NElement(xmlw, "dataAppr", version, DatasetFieldConstant.otherDataAppraisal, lang); xmlw.writeEndElement(); //anlyInfo writeNotesElement(xmlw, version); @@ -1412,9 +1413,10 @@ private static void writeI18NElement(XMLStreamWriter xmlw, String name, DatasetV String val = dto2Primitive(version, fieldTypeName); Locale defaultLocale = Locale.getDefault(); // Get the language-specific value for the default language + // A null value is returned if this is not a CVV field String localeVal = dto2Primitive(version, fieldTypeName, defaultLocale); String requestedLocaleVal = null; - if (lang != null && !defaultLocale.getLanguage().equals(lang)) { + if (lang != null && localeVal != null && !defaultLocale.getLanguage().equals(lang)) { // Also get the value in the requested locale/lang if that's not the default // lang. requestedLocaleVal = dto2Primitive(version, fieldTypeName, new Locale(lang)); From 6c09f15a3b1238ab9461091c438955b8296b5a0e Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 17 Feb 2022 14:27:32 -0500 Subject: [PATCH 041/366] isControlledVocabularyField convenience method --- src/main/java/edu/harvard/iq/dataverse/api/dto/FieldDTO.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/FieldDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/FieldDTO.java index 9d79e68cca3..684a4d12b7e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/dto/FieldDTO.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/FieldDTO.java @@ -290,6 +290,10 @@ public Object getConvertedValue() { } } + public boolean isControlledVocabularyField() { + return getTypeClass().equals("controlledVocabulary"); + } + @Override public int hashCode() { int hash = 3; From d60d63bfc9e7e88c9a9268cd43a27b589fae933b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 17 Feb 2022 14:29:34 -0500 Subject: [PATCH 042/366] handle keyword CVV vals --- .../dataverse/export/ddi/DdiExportUtil.java | 135 +++++++++++++----- 1 file changed, 97 insertions(+), 38 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index a49e0a73fbf..cb5a10fd81e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -93,6 +93,7 @@ public class DdiExportUtil { public static final String NOTE_TYPE_CONTENTTYPE = "DATAVERSE:CONTENTTYPE"; public static final String NOTE_SUBJECT_CONTENTTYPE = "Content/MIME Type"; + public static final String CITATION_BLOCK_NAME = "citation"; public static String datasetDtoAsJson2ddi(String datasetDtoAsJson) { logger.fine(JsonUtil.prettyPrint(datasetDtoAsJson)); @@ -524,7 +525,7 @@ private static void 
writeSummaryDescriptionElement(XMLStreamWriter xmlw, Dataset private static void writeMultipleElement(XMLStreamWriter xmlw, String element, FieldDTO fieldDTO, String lang) throws XMLStreamException { for (String value : fieldDTO.getMultiplePrimitive()) { //Write multiple lang vals for controlled vocab, otherwise don't include any lang tag - writeFullElement(xmlw, element, value, fieldDTO.getTypeClass().equals("controlledVocabulary") ? lang : null); + writeFullElement(xmlw, element, value, fieldDTO.isControlledVocabularyField() ? lang : null); } } @@ -550,10 +551,10 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO writeTargetSampleElement(xmlw, version); - writeI18nElement(xmlw, "deviat", version, DatasetFieldConstant.deviationsFromSampleDesign, lang); + writeI18NElement(xmlw, "deviat", version, DatasetFieldConstant.deviationsFromSampleDesign, lang); xmlw.writeStartElement("sources"); - writeI18NElementList(xmlw, "dataSrc", version, DatasetFieldConstant.dataSources, lang); + writeFullElementList(xmlw, "dataSrc", dto2PrimitiveList(version, DatasetFieldConstant.dataSources)); writeI18NElement(xmlw, "srcOrig", version, DatasetFieldConstant.originOfSources, lang); writeI18NElement(xmlw, "srcChar", version, DatasetFieldConstant.characteristicOfSources, lang); writeI18NElement(xmlw, "srcDocu", version, DatasetFieldConstant.accessToSources, lang); @@ -582,18 +583,20 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO private static void writeSubjectElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String lang) throws XMLStreamException{ //Key Words and Topic Classification - - xmlw.writeStartElement("subject"); + Locale defaultLocale = Locale.getDefault(); + xmlw.writeStartElement("subject"); for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { String key = entry.getKey(); MetadataBlockDTO value = entry.getValue(); - if ("citation".equals(key)) { + if (CITATION_BLOCK_NAME.equals(key)) { for (FieldDTO fieldDTO : value.getFields()) { - if (DatasetFieldConstant.subject.equals(fieldDTO.getTypeName())){ - writeI18NElementList(xmlw, "keyword", fieldDTO.getMultipleVocab(), "subject", fieldDTO.getTypeClass(), "citation", lang); + if (DatasetFieldConstant.subject.equals(fieldDTO.getTypeName())) { + writeI18NElementList(xmlw, "keyword", fieldDTO.getMultipleVocab(), "subject", + fieldDTO.getTypeClass(), "citation", lang); } - + if (DatasetFieldConstant.keyword.equals(fieldDTO.getTypeName())) { + boolean isCVV = false; for (HashSet foo : fieldDTO.getMultipleCompound()) { String keywordValue = ""; String keywordVocab = ""; @@ -601,30 +604,57 @@ private static void writeSubjectElement(XMLStreamWriter xmlw, DatasetVersionDTO for (Iterator iterator = foo.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.keywordValue.equals(next.getTypeName())) { - keywordValue = next.getSinglePrimitive(); + if (next.isControlledVocabularyField()) { + isCVV = true; + } + keywordValue = next.getSinglePrimitive(); } if (DatasetFieldConstant.keywordVocab.equals(next.getTypeName())) { - keywordVocab = next.getSinglePrimitive(); + keywordVocab = next.getSinglePrimitive(); } if (DatasetFieldConstant.keywordVocabURI.equals(next.getTypeName())) { - keywordURI = next.getSinglePrimitive(); + keywordURI = next.getSinglePrimitive(); } } - if (!keywordValue.isEmpty()){ - xmlw.writeStartElement("keyword"); - if(!keywordVocab.isEmpty()){ - writeAttribute(xmlw,"vocab",keywordVocab); + if 
(!keywordValue.isEmpty()) { + xmlw.writeStartElement("keyword"); + if (!keywordVocab.isEmpty()) { + writeAttribute(xmlw, "vocab", keywordVocab); + } + if (!keywordURI.isEmpty()) { + writeAttribute(xmlw, "vocabURI", keywordURI); + } + if (lang != null && isCVV) { + writeAttribute(xmlw, "xml:lang", defaultLocale.getLanguage()); + xmlw.writeCharacters(ControlledVocabularyValue.getLocaleStrValue(keywordValue, + DatasetFieldConstant.keywordValue, CITATION_BLOCK_NAME, defaultLocale, + true)); + } else { + xmlw.writeCharacters(keywordValue); + } + xmlw.writeEndElement(); // Keyword + if (lang != null && isCVV && !defaultLocale.getLanguage().equals(lang)) { + String translatedValue = ControlledVocabularyValue.getLocaleStrValue(keywordValue, + DatasetFieldConstant.keywordValue, CITATION_BLOCK_NAME, new Locale(lang), + false); + if (translatedValue != null) { + xmlw.writeStartElement("keyword"); + if (!keywordVocab.isEmpty()) { + writeAttribute(xmlw, "vocab", keywordVocab); + } + if (!keywordURI.isEmpty()) { + writeAttribute(xmlw, "vocabURI", keywordURI); + } + writeAttribute(xmlw, "xml:lang", lang); + xmlw.writeCharacters(translatedValue); + xmlw.writeEndElement(); // Keyword + } } - if(!keywordURI.isEmpty()){ - writeAttribute(xmlw,"vocabURI",keywordURI); - } - xmlw.writeCharacters(keywordValue); - xmlw.writeEndElement(); //Keyword } - } } if (DatasetFieldConstant.topicClassification.equals(fieldDTO.getTypeName())) { + boolean isCVV = false; for (HashSet foo : fieldDTO.getMultipleCompound()) { String topicClassificationValue = ""; String topicClassificationVocab = ""; @@ -632,34 +662,63 @@ private static void writeSubjectElement(XMLStreamWriter xmlw, DatasetVersionDTO for (Iterator iterator = foo.iterator(); iterator.hasNext();) { FieldDTO next = iterator.next(); if (DatasetFieldConstant.topicClassValue.equals(next.getTypeName())) { - topicClassificationValue = next.getSinglePrimitive(); + // Currently getSingleVocab() is the same as getSinglePrimitive() so this works + // for either case + topicClassificationValue = next.getSinglePrimitive(); + if (next.isControlledVocabularyField()) { + isCVV = true; + } } if (DatasetFieldConstant.topicClassVocab.equals(next.getTypeName())) { - topicClassificationVocab = next.getSinglePrimitive(); + topicClassificationVocab = next.getSinglePrimitive(); } if (DatasetFieldConstant.topicClassVocabURI.equals(next.getTypeName())) { - topicClassificationURI = next.getSinglePrimitive(); + topicClassificationURI = next.getSinglePrimitive(); } } - if (!topicClassificationValue.isEmpty()){ - xmlw.writeStartElement("topcClas"); - if(!topicClassificationVocab.isEmpty()){ - writeAttribute(xmlw,"vocab",topicClassificationVocab); - } - if(!topicClassificationURI.isEmpty()){ - writeAttribute(xmlw,"vocabURI",topicClassificationURI); - } - xmlw.writeCharacters(topicClassificationValue); - xmlw.writeEndElement(); //topcClas + if (!topicClassificationValue.isEmpty()) { + xmlw.writeStartElement("topcClas"); + if (!topicClassificationVocab.isEmpty()) { + writeAttribute(xmlw, "vocab", topicClassificationVocab); + } + if (!topicClassificationURI.isEmpty()) { + writeAttribute(xmlw, "vocabURI", topicClassificationURI); + } + if (lang != null && isCVV) { + writeAttribute(xmlw, "xml:lang", defaultLocale.getLanguage()); + xmlw.writeCharacters(ControlledVocabularyValue.getLocaleStrValue( + topicClassificationValue, DatasetFieldConstant.topicClassValue, + CITATION_BLOCK_NAME, defaultLocale, true)); + } else { + xmlw.writeCharacters(topicClassificationValue); + } + xmlw.writeEndElement(); // 
topcClas + if (lang != null && isCVV && !defaultLocale.getLanguage().equals(lang)) { + String translatedValue = ControlledVocabularyValue.getLocaleStrValue( + topicClassificationValue, DatasetFieldConstant.topicClassValue, + CITATION_BLOCK_NAME, new Locale(lang), false); + if (translatedValue != null) { + xmlw.writeStartElement("topcClas"); + if (!topicClassificationVocab.isEmpty()) { + writeAttribute(xmlw, "vocab", topicClassificationVocab); + } + if (!topicClassificationURI.isEmpty()) { + writeAttribute(xmlw, "vocabURI", topicClassificationURI); + } + writeAttribute(xmlw, "xml:lang", lang); + xmlw.writeCharacters(translatedValue); + xmlw.writeEndElement(); // topcClas + } + } } } } } } - } + } xmlw.writeEndElement(); // subject } - + private static void writeAuthorsElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException { for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { @@ -1339,7 +1398,7 @@ private static String dto2Primitive(DatasetVersionDTO datasetVersionDTO, String for (FieldDTO fieldDTO : value.getFields()) { if (datasetFieldTypeName.equals(fieldDTO.getTypeName())) { String rawVal = fieldDTO.getSinglePrimitive(); - if (fieldDTO.getTypeClass().equals("controlledVocabulary")) { + if (fieldDTO.isControlledVocabularyField()) { return ControlledVocabularyValue.getLocaleStrValue(rawVal, datasetFieldTypeName, value.getName(), locale, false); } From d3d1aab1b86b2df52688ac9a280e19118cb29219 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 18 Feb 2022 12:19:39 -0500 Subject: [PATCH 043/366] typo (Thanks pdurbin) and de-merge issues brealing this PR out --- .../harvard/iq/dataverse/SettingsWrapper.java | 48 +++------------ .../harvard/iq/dataverse/api/Dataverses.java | 4 +- .../settings/SettingsServiceBean.java | 59 +++++++++++++++++++ 3 files changed, 69 insertions(+), 42 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index ec06e6bb91a..dcbec37fd7e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -331,37 +331,20 @@ public Boolean isHasDropBoxKey() { public boolean isLocalesConfigured() { if (configuredLocales == null) { - initLocaleSettings(); + configuredLocales = new LinkedHashMap<>(); + settingsService.initLocaleSettings(configuredLocales); } return configuredLocales.size() > 1; } public Map getConfiguredLocales() { if (configuredLocales == null) { - initLocaleSettings(); + configuredLocales = new LinkedHashMap<>(); + settingsService.initLocaleSettings(configuredLocales); } return configuredLocales; } - private void initLocaleSettings() { - - configuredLocales = new LinkedHashMap<>(); - - try { - JSONArray entries = new JSONArray(getValueForKey(SettingsServiceBean.Key.Languages, "[]")); - for (Object obj : entries) { - JSONObject entry = (JSONObject) obj; - String locale = entry.getString("locale"); - String title = entry.getString("title"); - - configuredLocales.put(locale, title); - } - } catch (JSONException e) { - //e.printStackTrace(); - // do we want to know? 
- probably not - } - } - public boolean isDoiInstallation() { String protocol = getValueForKey(SettingsServiceBean.Key.Protocol); if ("doi".equals(protocol)) { @@ -490,31 +473,16 @@ public void validateEmbargoDate(FacesContext context, UIComponent component, Obj Map getBaseMetadataLanguageMap(boolean refresh) { if (languageMap == null || refresh) { - languageMap = new HashMap(); - - /* If MetadataLanaguages is set, use it. - * If not, we can't assume anything and should avoid assuming a metadata language - */ - String mlString = getValueForKey(SettingsServiceBean.Key.MetadataLanguages,""); - - if(mlString.isEmpty()) { - mlString="[]"; - } - JsonReader jsonReader = Json.createReader(new StringReader(mlString)); - JsonArray languages = jsonReader.readArray(); - for(JsonValue jv: languages) { - JsonObject lang = (JsonObject) jv; - languageMap.put(lang.getString("locale"), lang.getString("title")); - } + languageMap = settingsService.getBaseMetadataLanguageMap(languageMap, true); } return languageMap; } public Map getMetadataLanguages(DvObjectContainer target) { Map currentMap = new HashMap(); - currentMap.putAll(getBaseMetadataLanguageMap(true)); - languageMap.put(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE, getDefaultMetadataLanguageLabel(target)); - return languageMap; + currentMap.putAll(getBaseMetadataLanguageMap(false)); + currentMap.put(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE, getDefaultMetadataLanguageLabel(target)); + return currentMap; } private String getDefaultMetadataLanguageLabel(DvObjectContainer target) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index a9c706996a7..5420762afd8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -305,7 +305,7 @@ public Response createDatasetFromJsonLd(String jsonLDBody, @PathParam("identifie //Verify metadatalanguage is allowed if(ds.getMetadataLanguage()!= null && !settingsService.getBaseMetadataLanguageMap(new HashMap(), true).containsKey(ds.getMetadataLanguage())) { - throw new BadRequestException("Specified metadatalangauge (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed."); + throw new BadRequestException("Specified metadatalanguage (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed."); } Dataset managedDs = execCommand(new CreateNewDatasetCommand(ds, createDataverseRequest(u))); @@ -489,7 +489,7 @@ public Response recreateDataset(String jsonLDBody, @PathParam("identifier") Stri //Verify metadatalanguage is allowed if(ds.getMetadataLanguage()!= null && !settingsService.getBaseMetadataLanguageMap(new HashMap(), true).containsKey(ds.getMetadataLanguage())) { - throw new BadRequestException("Specified metadatalangauge (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed."); + throw new BadRequestException("Specified metadatalanguage (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed."); } if (ds.getVersions().isEmpty()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index efa944cf633..e13ea806dc7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -9,12 +9,23 @@ import javax.ejb.Stateless; import javax.inject.Named; import javax.json.Json; +import 
javax.json.JsonArray; import javax.json.JsonObject; +import javax.json.JsonReader; +import javax.json.JsonValue; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; + +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; + import java.io.StringReader; +import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; @@ -730,5 +741,53 @@ public Set listAll() { return new HashSet<>(em.createNamedQuery("Setting.findAll", Setting.class).getResultList()); } + public Map getBaseMetadataLanguageMap(Map languageMap, boolean refresh) { + if (languageMap == null || refresh) { + languageMap = new HashMap(); + + /* If MetadataLanaguages is set, use it. + * If not, we can't assume anything and should avoid assuming a metadata language + */ + String mlString = getValueForKey(SettingsServiceBean.Key.MetadataLanguages,""); + + if(mlString.isEmpty()) { + mlString="[]"; + } + JsonReader jsonReader = Json.createReader(new StringReader(mlString)); + JsonArray languages = jsonReader.readArray(); + for(JsonValue jv: languages) { + JsonObject lang = (JsonObject) jv; + languageMap.put(lang.getString("locale"), lang.getString("title")); + } + } + return languageMap; + } + public void initLocaleSettings(Map configuredLocales) { + + try { + JSONArray entries = new JSONArray(getValueForKey(SettingsServiceBean.Key.Languages, "[]")); + for (Object obj : entries) { + JSONObject entry = (JSONObject) obj; + String locale = entry.getString("locale"); + String title = entry.getString("title"); + + configuredLocales.put(locale, title); + } + } catch (JSONException e) { + //e.printStackTrace(); + // do we want to know? 
- probably not + } + } + + + public Set getConfiguredLanguages() { + Set langs = new HashSet(); + langs.addAll(getBaseMetadataLanguageMap(new HashMap(), true).keySet()); + Map configuredLocales = new LinkedHashMap<>(); + initLocaleSettings(configuredLocales); + langs.addAll(configuredLocales.keySet()); + return langs; + } + } From 36e077fa49e7483b895f21e26b727c18d0887f35 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 21 Feb 2022 16:21:24 -0500 Subject: [PATCH 044/366] Add debug info to test --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index c08a71eea65..685c45e128e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -2326,7 +2326,7 @@ public void testReCreateDataset() { // Get the semantic metadata Response response = UtilIT.getDatasetJsonLDMetadata(datasetId, apiToken); response.then().assertThat().statusCode(OK.getStatusCode()); - + response.prettyPeek(); String expectedString = getData(response.getBody().asString()); // Delete the dataset via native API @@ -2337,6 +2337,7 @@ public void testReCreateDataset() { // Now use the migrate API to recreate the dataset // Now use the migrate API to recreate the dataset response = UtilIT.recreateDatasetJsonLD(apiToken, dataverseAlias, expectedString); + response.prettyPeek(); String body = response.getBody().asString(); response.then().assertThat().statusCode(CREATED.getStatusCode()); From 99174b9acc5926dad348d2b10536b15afa963c93 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 21 Feb 2022 17:53:04 -0500 Subject: [PATCH 045/366] check undefined code, not null --- .../java/edu/harvard/iq/dataverse/util/bagit/OREMap.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index ff9de5d8f25..7ae14655e81 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetFieldType; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.DvObjectContainer; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.branding.BrandingUtil; @@ -214,9 +215,10 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Except aggBuilder.add(JsonLDTerm.schemaOrg("includedInDataCatalog").getLabel(), BrandingUtil.getRootDataverseCollectionName()); - - addIfNotNull(aggBuilder, JsonLDTerm.schemaOrg("inLanguage"), dataset.getMetadataLanguage()); - + String mdl = dataset.getMetadataLanguage(); + if(!mdl.equals(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE)) { + aggBuilder.add(JsonLDTerm.schemaOrg("inLanguage").getLabel(), mdl); + } // The aggregation aggregates aggregatedresources (Datafiles) which each have // their own entry and metadata From 2163721e13034ec3598b34291852ca9d9d722ba4 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 21 Feb 2022 18:53:48 -0500 Subject: [PATCH 046/366] explicitly print --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 685c45e128e..d72b754fdb6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -2334,6 +2334,8 @@ public void testReCreateDataset() { deleteDatasetResponse.prettyPrint(); assertEquals(200, deleteDatasetResponse.getStatusCode()); + + logger.info("SENDING to reCreate Dataset: " + expectedString); // Now use the migrate API to recreate the dataset // Now use the migrate API to recreate the dataset response = UtilIT.recreateDatasetJsonLD(apiToken, dataverseAlias, expectedString); From 4549f0c224ecde4d099218ecaef430bd598634fb Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 23 Feb 2022 09:52:20 -0500 Subject: [PATCH 047/366] TDL Bundle text --- src/main/java/propertyFiles/Bundle.properties | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 8abca8ff3fd..35d813586ce 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -145,7 +145,7 @@ contact.header=Contact {0} contact.dataverse.header=Email Dataverse Contact contact.dataset.header=Email Dataset Contact contact.to=To -contact.support=Support +contact.support=TDL Dataverse Support contact.from=From contact.from.required=User email is required. contact.from.invalid=Email is invalid. @@ -317,9 +317,9 @@ login.System=Login System login.forgot.text=Forgot your password? login.builtin=Dataverse Account login.institution=Institutional Account -login.institution.blurb=Log in or sign up with your institutional account — more information about account creation. +login.institution.blurb=Log in or sign up with your institutional account — learn more. If you are not affiliated with a TDR member institution (see dropdown menu), please use the Google Login option. login.institution.support.blurbwithLink=Leaving your institution? Please contact {0} for assistance. -login.builtin.credential.usernameOrEmail=Username/Email +login.builtin.credential.usernameOrEmail=Admin ID login.builtin.credential.password=Password login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account? login.signup.blurb=Sign up for a Dataverse account. @@ -335,12 +335,12 @@ login.button.orcid=Create or Connect your ORCID # authentication providers auth.providers.title=Other options auth.providers.tip=You can convert a Dataverse account to use one of the options above. More information about account creation. -auth.providers.title.builtin=Username/Email +auth.providers.title.builtin=Admin ID auth.providers.title.shib=Your Institution auth.providers.title.orcid=ORCID -auth.providers.title.google=Google +auth.providers.title.google=Google (No TDR affiliation) auth.providers.title.github=GitHub -auth.providers.blurb=Log in or sign up with your {0} account — more information about account creation. Having trouble? Please contact {3} for assistance. +auth.providers.blurb=Log in or sign up with your Google account — learn more. If you are not affiliated with a TDR member institution, please use the Google Login option. Having trouble? Please contact {3} for assistance. 
auth.providers.persistentUserIdName.orcid=ORCID iD auth.providers.persistentUserIdName.github=ID auth.providers.persistentUserIdTooltip.orcid=ORCID provides a persistent digital identifier that distinguishes you from other researchers. @@ -383,7 +383,7 @@ shib.welcomeExistingUserMessageDefaultInstitution=your institution shib.dataverseUsername=Dataverse Username shib.currentDataversePassword=Current Dataverse Password shib.accountInformation=Account Information -shib.offerToCreateNewAccount=This information is provided by your institution and will be used to create your Dataverse account. +shib.offerToCreateNewAccount=Contact your TDR liaison to get help and training. Published content cannot be easily deleted. shib.passwordRejected=Validation Error - Your account can only be converted if you provide the correct password for your existing account. If your existing account has been deactivated by an administrator, you cannot convert your account. # oauth2/firstLogin.xhtml From 7279c800fab3745634a3f1b4c01ec5094574c9f5 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 23 Feb 2022 11:07:53 -0500 Subject: [PATCH 048/366] #8191 consolidate delete function --- .../edu/harvard/iq/dataverse/DatasetPage.java | 44 +++++++------------ src/main/webapp/editFilesFragment.xhtml | 6 ++- 2 files changed, 21 insertions(+), 29 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 80917d58a1c..cb0539738c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -143,6 +143,7 @@ import edu.harvard.iq.dataverse.search.SearchServiceBean; import edu.harvard.iq.dataverse.search.SearchUtil; import edu.harvard.iq.dataverse.search.SolrClientService; +import edu.harvard.iq.dataverse.util.FileMetadataUtil; import java.util.Comparator; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.impl.HttpSolrClient; @@ -3374,7 +3375,16 @@ public String deleteFiles() throws CommandException{ } deleteFiles(filesToDelete); - String retVal = save(); + String retVal; + + if (editMode == EditMode.CREATE) { + workingVersion.setFileMetadatas(new ArrayList<>()); + retVal = ""; + } else { + retVal = save(); + } + + //And delete them only after the dataset is updated for(Embargo emb: orphanedEmbargoes) { embargoService.deleteById(emb.getId(), ((AuthenticatedUser)session.getUser()).getUserIdentifier()); @@ -3409,32 +3419,12 @@ private void deleteFiles(List filesToDelete) { // So below we are deleting the metadata from the version; we are // NOT adding the file to the filesToBeDeleted list that will be // passed to the UpdateDatasetCommand. -- L.A. Aug 2017 - Iterator fmit = dataset.getEditVersion().getFileMetadatas().iterator(); - while (fmit.hasNext()) { - FileMetadata fmd = fmit.next(); - if (markedForDelete.getDataFile().getStorageIdentifier().equals(fmd.getDataFile().getStorageIdentifier())) { - // And if this is an image file that happens to be assigned - // as the dataset thumbnail, let's null the assignment here: - - if (fmd.getDataFile().equals(dataset.getThumbnailFile())) { - dataset.setThumbnailFile(null); - } - /* It should not be possible to get here if this file - is not in fact released! - so the code block below - is not needed. 
- //if not published then delete identifier - if (!fmd.getDataFile().isReleased()){ - try{ - commandEngine.submit(new DeleteDataFileCommand(fmd.getDataFile(), dvRequestService.getDataverseRequest())); - } catch (CommandException e){ - //this command is here to delete the identifier of unreleased files - //if it fails then a reserved identifier may still be present on the remote provider - } - } */ - fmit.remove(); - break; - } - } + + FileMetadataUtil.removeFileMetadataFromList(workingVersion.getFileMetadatas(), markedForDelete); + + FileMetadataUtil.removeDataFileFromList(newFiles, markedForDelete.getDataFile()); + FileUtil.deleteTempFile(markedForDelete.getDataFile(), dataset, ingestService); + } } diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 6558bb47b38..f6b5157a1a5 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -434,8 +434,10 @@
    #{bundle['file.deleteFileDialog.tip']}
    #{bundle['file.deleteFileDialog.failed.tip']}
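Patch 048 above moves the per-file removal logic out of DatasetPage.deleteFiles() and into FileMetadataUtil.removeFileMetadataFromList() and FileMetadataUtil.removeDataFileFromList(), whose bodies do not appear in this diff. A minimal sketch of what such helpers could look like — assuming, as the deleted inline loop did, that files are matched on the DataFile's storage identifier; the bodies below are illustrative only, not the actual Dataverse implementation:

package edu.harvard.iq.dataverse.util;

import java.util.Iterator;
import java.util.List;

import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.FileMetadata;

public class FileMetadataUtil {

    // Drop the FileMetadata whose DataFile matches the file marked for delete;
    // an explicit Iterator is used so the live version list can be modified safely.
    public static void removeFileMetadataFromList(List<FileMetadata> fmds, FileMetadata markedForDelete) {
        Iterator<FileMetadata> fmit = fmds.iterator();
        while (fmit.hasNext()) {
            FileMetadata fmd = fmit.next();
            if (markedForDelete.getDataFile().getStorageIdentifier()
                    .equals(fmd.getDataFile().getStorageIdentifier())) {
                fmit.remove();
                break;
            }
        }
    }

    // Drop a newly uploaded (not yet saved) DataFile from the working list,
    // again matching on the storage identifier.
    public static void removeDataFileFromList(List<DataFile> files, DataFile fileToRemove) {
        Iterator<DataFile> dfit = files.iterator();
        while (dfit.hasNext()) {
            if (fileToRemove.getStorageIdentifier().equals(dfit.next().getStorageIdentifier())) {
                dfit.remove();
                break;
            }
        }
    }
}

Matching on the storage identifier rather than on object identity mirrors the inline loop that patch 048 deletes, which had to cope with detached copies of the same DataFile during an edit session.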
    From 56ff7bc1081b639ebac054c2f723f7dcec103285 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 24 Feb 2022 14:39:07 -0500 Subject: [PATCH 050/366] fix thread use of requestscoped service --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index cd1574074e4..07d7455c20e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1784,7 +1784,7 @@ public void run() { * @return */ @GET - @Path("/archiveAllUnarchivedDataVersions") + @Path("/archiveAllUnarchivedDatasetVersions") public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit, @QueryParam("latestonly") boolean latestonly) { try { @@ -1814,7 +1814,7 @@ public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") bool } String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName); AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dsl.get(0)); - + final DataverseRequest request = dvRequestService.getDataverseRequest(); if (cmd != null) { new Thread(new Runnable() { public void run() { @@ -1827,7 +1827,7 @@ public void run() { } if (!latestonly || dv.equals(dv.getDataset().getLatestVersionForCopy())) { try { - AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, dvRequestService.getDataverseRequest(), dv); + AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, request, dv); dv = commandEngine.submit(cmd); if (dv.getArchivalCopyLocation() != null) { From 46f8554f951c1449cc6794a5250e11c8e5b868b3 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 24 Feb 2022 14:39:38 -0500 Subject: [PATCH 051/366] update doc to match api call name --- doc/sphinx-guides/source/installation/config.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index d4e0c0d6baa..0e77590125c 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -241,7 +241,7 @@ As for the "Remote only" authentication mode, it means that: - The "builtin" authentication provider has been disabled (:ref:`api-toggle-auth-provider`). Note that disabling the "builtin" authentication provider means that the API endpoint for converting an account from a remote auth provider will not work. Converting directly from one remote authentication provider to another (i.e. from GitHub to Google) is not supported. Conversion from remote is always to "builtin". Then the user initiates a conversion from "builtin" to remote. Note that longer term, the plan is to permit multiple login options to the same Dataverse installation account per https://github.com/IQSS/dataverse/issues/3487 (so all this talk of conversion will be moot) but for now users can only use a single login option, as explained in the :doc:`/user/account` section of the User Guide. In short, "remote only" might work for you if you only plan to use a single remote authentication provider such that no conversion between remote authentication providers will be necessary. 
File Storage: Using a Local Filesystem and/or Swift and/or object stores ---------------------------------------------------------------------------- +------------------------------------------------------------------------ By default, a Dataverse installation stores all data files (files uploaded by end users) on the filesystem at ``/usr/local/payara5/glassfish/domains/domain1/files``. This path can vary based on answers you gave to the installer (see the :ref:`dataverse-installer` section of the Installation Guide) or afterward by reconfiguring the ``dataverse.files.\.directory`` JVM option described below. @@ -954,7 +954,7 @@ where: A batch API call is also available that will attempt to archive any currently unarchived dataset versions: -``curl -H "X-Dataverse-key: " http://localhost:8080/api/admin/archiveAllUnarchivedDataVersions`` +``curl -H "X-Dataverse-key: " http://localhost:8080/api/admin/archiveAllUnarchivedDatasetVersions`` The call supports three optional query parameters that can be used in combination: From 33b85f4eb5e32b3774df05e9b32b6f91b23be2e0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 24 Feb 2022 14:40:21 -0500 Subject: [PATCH 052/366] adjust to use a space per dataverse (alias) checks to see if space exists --- .../impl/DuraCloudSubmitToArchiveCommand.java | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java index 468e99f24c1..17e69d7f356 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java @@ -61,8 +61,8 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t Credential credential = new Credential(System.getProperty("duracloud.username"), System.getProperty("duracloud.password")); storeManager.login(credential); - - String spaceName = dataset.getGlobalId().asString().replace(':', '-').replace('/', '-') + String spaceName=dataset.getOwner().getAlias(); + String baseFileName = dataset.getGlobalId().asString().replace(':', '-').replace('/', '-') .replace('.', '-').toLowerCase(); ContentStore store; @@ -75,7 +75,9 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t */ store = storeManager.getPrimaryContentStore(); // Create space to copy archival files to - store.createSpace(spaceName); + if(!store.spaceExists(spaceName)) { + store.createSpace(spaceName); + } DataCitation dc = new DataCitation(dv); Map metadata = dc.getDataCiteMetadata(); String dataciteXml = DOIDataCiteRegisterService.getMetadataFromDvObject( @@ -105,7 +107,7 @@ public void run() { Thread.sleep(10); i++; } - String checksum = store.addContent(spaceName, "datacite.xml", digestInputStream, -1l, null, null, + String checksum = store.addContent(spaceName,baseFileName + "_datacite.xml", digestInputStream, -1l, null, null, null); logger.fine("Content: datacite.xml added with checksum: " + checksum); String localchecksum = Hex.encodeHexString(digestInputStream.getMessageDigest().digest()); @@ -116,7 +118,7 @@ public void run() { } // Store BagIt file - String fileName = spaceName + "v" + dv.getFriendlyVersionNumber() + ".zip"; + String fileName = baseFileName + "v" + dv.getFriendlyVersionNumber() + ".zip"; // Add BagIt ZIP file // Although DuraCloud uses SHA-256 
internally, it's API uses MD5 to verify the @@ -194,7 +196,7 @@ public void run() { if (!(1 == dv.getVersion()) || !(0 == dv.getMinorVersionNumber())) { mesg = mesg + ": Prior Version archiving not yet complete?"; } - return new Failure("Unable to create DuraCloud space with name: " + spaceName, mesg); + return new Failure("Unable to create DuraCloud space with name: " + baseFileName, mesg); } catch (NoSuchAlgorithmException e) { logger.severe("MD5 MessageDigest not available!"); } From e205f4b48c64607243812298026c4cfe65c454a0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 24 Feb 2022 17:34:50 -0500 Subject: [PATCH 053/366] custom version --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 235d1ec0317..c7cf357dd83 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ --> edu.harvard.iq dataverse - 5.9 + 5.9-dev-tdl war dataverse From 96389dfe022e554394eda8107838c82799066cc3 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 3 Mar 2022 16:42:13 -0500 Subject: [PATCH 054/366] #8400 add integration test for curation publish --- .../harvard/iq/dataverse/api/DatasetsIT.java | 74 +++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 23c17c071ff..985d0c38f28 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -2584,4 +2584,78 @@ public void testFilesUnchangedAfterDatasetMetadataUpdate() throws IOException { } + @Test + public void testCuratePublishedDatasetVersionCommand() throws IOException { + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + createUser.then().assertThat() + .statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + String username = UtilIT.getUsernameFromResponse(createUser); + + + Response createDataverse = UtilIT.createRandomDataverse(apiToken); + createDataverse.prettyPrint(); + createDataverse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId"); + Integer datasetId = JsonPath.from(createDataset.asString()).getInt("data.id"); + + Path pathtoScript = Paths.get(java.nio.file.Files.createTempDirectory(null) + File.separator + "run.sh"); + java.nio.file.Files.write(pathtoScript, "#!/bin/bash\necho hello".getBytes()); + + JsonObjectBuilder json1 = Json.createObjectBuilder() + .add("description", "A script to reproduce results.") + .add("directoryLabel", "code"); + + Response uploadReadme1 = UtilIT.uploadFileViaNative(datasetId.toString(), pathtoScript.toString(), json1.build(), apiToken); + uploadReadme1.prettyPrint(); + uploadReadme1.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.files[0].label", equalTo("run.sh")) + .body("data.files[0].directoryLabel", equalTo("code")); + + UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + + Response 
getDatasetJsonBeforeUpdate = UtilIT.nativeGet(datasetId, apiToken); + getDatasetJsonBeforeUpdate.prettyPrint(); + getDatasetJsonBeforeUpdate.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.latestVersion.files[0].label", equalTo("run.sh")) + .body("data.latestVersion.files[0].directoryLabel", equalTo("code")); + + String pathToJsonFile = "doc/sphinx-guides/source/_static/api/dataset-update-metadata.json"; + Response updateTitle = UtilIT.updateDatasetMetadataViaNative(datasetPid, pathToJsonFile, apiToken); + updateTitle.prettyPrint(); + updateTitle.then().assertThat() + .statusCode(OK.getStatusCode()); + + // shouldn't be able to update current unless you're a super user + + UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(FORBIDDEN.getStatusCode()); + + Response makeSuperUser = UtilIT.makeSuperUser(username); + + //should work after making super user + + UtilIT.publishDatasetViaNativeApi(datasetId, "updatecurrent", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + + Response getDatasetJsonAfterUpdate = UtilIT.nativeGet(datasetId, apiToken); + getDatasetJsonAfterUpdate.prettyPrint(); + getDatasetJsonAfterUpdate.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.latestVersion.files[0].label", equalTo("run.sh")) + .body("data.latestVersion.files[0].directoryLabel", equalTo("code")); + + } + } From 76c7c70012cd8382be9deed3b463f510f4f577ab Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 4 Mar 2022 16:43:37 -0500 Subject: [PATCH 055/366] #8191 fix edit metadata popup --- src/main/webapp/dataset.xhtml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index e4721aaf13c..4a470804ea8 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1900,14 +1900,16 @@ function editTerms(){ linkEditTerms(); } - function editMetadata(){ + function editMetadata(){ var validTOA = document.getElementById("datasetForm:validTermsofAccess").value; - if (validTOA === false){ + if (validTOA === 'false'){ PF('blockDatasetForm').show(); PF('accessPopup').show(); - } - editMedatdataCommand(); + } else { + editMedatdataCommand(); + } } + function testCheckBoxes() { var count = PF('versionsTable').getSelectedRowsCount(); if (count !== 2) { From 22fde437e5b5da6bc7f8bc0fcc1020debedc1974 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 7 Mar 2022 10:08:15 -0500 Subject: [PATCH 056/366] #8191 hide block ds on cancel --- src/main/webapp/file-edit-popup-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml index 50fafc66d31..e65670addd4 100644 --- a/src/main/webapp/file-edit-popup-fragment.xhtml +++ b/src/main/webapp/file-edit-popup-fragment.xhtml @@ -75,7 +75,7 @@ disabled="#{empty bean.termsOfAccess and !bean.fileAccessRequest}" onclick="PF('accessPopup').hide();" update=":messagePanel" /> - From 5c4b85a69349088906076df1eda189a5887c0075 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 8 Mar 2022 12:07:09 -0500 Subject: [PATCH 057/366] check versus undefined code, not null --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 5420762afd8..c8f89493332 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -10,6 +10,7 @@ import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.DvObjectContainer; import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; import edu.harvard.iq.dataverse.GuestbookServiceBean; @@ -304,7 +305,7 @@ public Response createDatasetFromJsonLd(String jsonLDBody, @PathParam("identifie ds.setGlobalIdCreateTime(null); //Verify metadatalanguage is allowed - if(ds.getMetadataLanguage()!= null && !settingsService.getBaseMetadataLanguageMap(new HashMap(), true).containsKey(ds.getMetadataLanguage())) { + if(ds.getMetadataLanguage()!= DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE && !settingsService.getBaseMetadataLanguageMap(new HashMap(), true).containsKey(ds.getMetadataLanguage())) { throw new BadRequestException("Specified metadatalanguage (" + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed."); } From d9dfc3301901cc72a6566e6409284b63f57d9ff3 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 8 Mar 2022 16:57:37 -0500 Subject: [PATCH 058/366] poi sec update Conflicts: pom.xml --- pom.xml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 7663bb3e6e6..32a671c0040 100644 --- a/pom.xml +++ b/pom.xml @@ -23,6 +23,7 @@ 5.2.4 1.20.1 0.8.7 + 5.2.1 org.apache.tika - tika-parsers - 1.27 + tika-core + ${tika.version} + + + org.apache.tika + tika-parsers-standard-package + ${tika.version} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCurationStatusCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCurationStatusCommand.java index 4a7b8ee1d34..c3a62a35bb3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCurationStatusCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCurationStatusCommand.java @@ -29,7 +29,6 @@ import org.apache.solr.client.solrj.SolrServerException; -import com.beust.jcommander.Strings; import com.google.api.LabelDescriptor; @RequiredPermissions(Permission.PublishDataset) From 2a1247f8191149866ce5e2b7f0f48980c555536d Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 8 Mar 2022 18:41:44 -0500 Subject: [PATCH 063/366] fix tika IOUtils imports --- .../harvard/iq/dataverse/dataaccess/StoredOriginalFile.java | 3 ++- .../java/edu/harvard/iq/dataverse/search/IndexServiceBean.java | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StoredOriginalFile.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StoredOriginalFile.java index 587cf847faf..068f11ccf85 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StoredOriginalFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StoredOriginalFile.java @@ -27,7 +27,8 @@ import java.nio.channels.ReadableByteChannel; import java.util.logging.Logger; -import org.apache.tika.io.IOUtils; +import org.apache.commons.io.IOUtils; + /** * * @author Leonid Andreev diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index e4844156271..7da401b5ce2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -67,6 +67,8 @@ import javax.json.JsonObject; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; + +import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; @@ -80,7 +82,6 @@ import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.params.CursorMarkParams; import org.apache.tika.parser.AutoDetectParser; -import org.apache.tika.io.IOUtils; import org.apache.tika.metadata.Metadata; import org.apache.tika.parser.ParseContext; import org.apache.tika.sax.BodyContentHandler; From 6a1333dbdd07d30bc6833b1bedee7c0faad4434e Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 8 Mar 2022 22:35:08 -0500 Subject: [PATCH 064/366] replace another Strings.join --- .../java/edu/harvard/iq/dataverse/api/Datasets.java | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index e21396dd487..8c67aecc242 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.api; import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.DatasetLock.Reason; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; @@ -146,9 +147,6 @@ import org.glassfish.jersey.media.multipart.FormDataContentDisposition; import org.glassfish.jersey.media.multipart.FormDataParam; import com.amazonaws.services.s3.model.PartETag; -import com.beust.jcommander.Strings; - -import java.util.Map.Entry; @Path("datasets") public class Datasets extends AbstractApiBean { @@ -2691,9 +2689,12 @@ public Response listLocks(@QueryParam("type") String lockType, @QueryParam("user try { lockTypeValue = DatasetLock.Reason.valueOf(lockType); } catch (IllegalArgumentException iax) { - String validValues = Strings.join(",", DatasetLock.Reason.values()); + StringJoiner reasonJoiner = new StringJoiner(", "); + for (Reason r: Reason.values()) { + reasonJoiner.add(r.name()); + }; String errorMessage = "Invalid lock type value: " + lockType + - "; valid lock types: " + validValues; + "; valid lock types: " + reasonJoiner.toString(); return error(Response.Status.BAD_REQUEST, errorMessage); } } From dd968ef2efdb9b4aedd399a902b3018eb5426de7 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 8 Mar 2022 22:40:58 -0500 Subject: [PATCH 065/366] adjust richtextstring reading --- .../ingest/tabulardata/impl/plugins/xlsx/XLSXFileReader.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/xlsx/XLSXFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/xlsx/XLSXFileReader.java index 4727c847b75..ea3f3868f24 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/xlsx/XLSXFileReader.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/xlsx/XLSXFileReader.java @@ -410,7 +410,7 @@ public void endElement(String uri, String localName, String name) // Do it now, as characters() may be called more than once if (nextIsString) { int idx = 
Integer.parseInt(cellContents); - cellContents = new XSSFRichTextString(sst.getEntryAt(idx)).toString(); + cellContents = sst.getItemAt(idx).getString(); nextIsString = false; } From 9b782a77009f78dcd667f12d28122b5da464a340 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 9 Mar 2022 09:31:02 -0500 Subject: [PATCH 066/366] #8191 add validate or die to commands --- .../dataverse/TermsOfUseAndAccessValidator.java | 2 +- .../command/impl/AbstractDatasetCommand.java | 16 +++++++++++----- .../CuratePublishedDatasetVersionCommand.java | 2 +- .../impl/FinalizeDatasetPublicationCommand.java | 4 +++- .../command/impl/PublishDatasetCommand.java | 3 ++- src/main/java/propertyFiles/Bundle.properties | 2 +- 6 files changed, 19 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java index cf3edb29d5f..ee094d772e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java @@ -39,7 +39,7 @@ public static boolean isTOUAValid(TermsOfUseAndAccess value, ConstraintValidator if (!valid) { try { if (context != null) { - context.buildConstraintViolationWithTemplate("If Request Access is false then Terms of Access must be provided.").addConstraintViolation(); + context.buildConstraintViolationWithTemplate(BundleUtil.getStringFromBundle("toua.invalid")).addConstraintViolation(); } value.setValidationMessage(BundleUtil.getStringFromBundle("toua.invalid")); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java index e66b5c9043d..49b357a940b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java @@ -21,6 +21,7 @@ import static java.util.stream.Collectors.joining; import javax.validation.ConstraintViolation; import edu.harvard.iq.dataverse.GlobalIdServiceBean; +import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean; /** @@ -96,18 +97,23 @@ protected void createDatasetUser(CommandContext ctxt) { */ protected void validateOrDie(DatasetVersion dsv, Boolean lenient) throws CommandException { Set constraintViolations = dsv.validate(); + if (!constraintViolations.isEmpty()) { if (lenient) { - // populate invalid fields with N/A - constraintViolations.stream() + // populate invalid fields with N/A + // ignore invalid toua + constraintViolations.stream().filter(cv -> cv.getRootBean() instanceof DatasetField) .map(cv -> ((DatasetField) cv.getRootBean())) - .forEach(f -> f.setSingleValue(DatasetField.NA_VALUE)); - - } else { + .forEach(f -> f.setSingleValue(DatasetField.NA_VALUE)); + } else { // explode with a helpful message String validationMessage = constraintViolations.stream() .map(cv -> cv.getMessage() + " (Invalid value:" + cv.getInvalidValue() + ")") .collect(joining(", ", "Validation Failed: ", ".")); + + validationMessage += constraintViolations.stream() + .filter(cv -> cv.getRootBean() instanceof TermsOfUseAndAccess) + .map(cv -> cv.toString()); throw new IllegalCommandException(validationMessage, this); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index 24966f0a548..fcc0cda11fc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -34,7 +34,7 @@ public class CuratePublishedDatasetVersionCommand extends AbstractDatasetCommand { private static final Logger logger = Logger.getLogger(CuratePublishedDatasetVersionCommand.class.getCanonicalName()); - final private boolean validateLenient = true; + final private boolean validateLenient = false; public CuratePublishedDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest) { super(aRequest, theDataset); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index 01ac0cf5804..52b7e1c5376 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -84,7 +84,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException { validateDataFiles(theDataset, ctxt); // (this will throw a CommandException if it fails) } - + + validateOrDie(theDataset.getLatestVersion(), false); + /* * Try to register the dataset identifier. For PID providers that have registerWhenPublished == false (all except the FAKE provider at present) * the registerExternalIdentifier command will make one try to create the identifier if needed (e.g. if reserving at dataset creation wasn't done/failed). diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java index 665172a1e9f..8a0e9b91066 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java @@ -70,8 +70,9 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException // When importing a released dataset, the latest version is marked as RELEASED. Dataset theDataset = getDataset(); - + validateOrDie(theDataset.getLatestVersion(), false); + //ToDo - any reason to set the version in publish versus finalize? Failure in a prepub workflow or finalize will leave draft versions with an assigned version number as is. //Changing the dataset in this transaction also potentially makes a race condition with a prepub workflow, possibly resulting in an OptimisticLockException there. diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 26aed12ab65..1d8601211bf 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2301,7 +2301,7 @@ email.invalid=is not a valid email address. url.invalid=is not a valid URL. #TermsOfUseAndAccessValidator -toua.invalid=Constraint violation found in Terms of Use and Access. If Request Access to restricted files is set to false then Terms of Access must be provided. +toua.invalid=Constraint violation found in Terms of Use and Access. If restricted files are present then Request Access must be allowed or Terms of Access must be provided. 
#HarvestingClientsPage.java From 57813947aa310c8d85d03ec45abf926b3c230d5f Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 9 Mar 2022 10:53:42 -0500 Subject: [PATCH 067/366] #8191 add integration test for req/toa validation --- src/main/java/propertyFiles/Bundle.properties | 1 + .../harvard/iq/dataverse/api/DatasetsIT.java | 65 +++++++++++++++++++ 2 files changed, 66 insertions(+) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 9b22c83ac74..64eab5f3554 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2510,6 +2510,7 @@ access.api.allowRequests.failure.noSave=Problem saving dataset {0}: {1} access.api.allowRequests.allows=allows access.api.allowRequests.disallows=disallows access.api.allowRequests.success=Dataset {0} {1} file access requests. +access.api.fileAccess.failure.noSave=Could not update Request Access for {0} Error Message {1} access.api.fileAccess.failure.noUser=Could not find user to execute command: {0} access.api.requestAccess.failure.commandError=Problem trying request access on {0} : {1} access.api.requestAccess.failure.requestExists=An access request for this file on your behalf already exists. diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 23c17c071ff..96354c37cc3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -2584,4 +2584,69 @@ public void testFilesUnchangedAfterDatasetMetadataUpdate() throws IOException { } + /** + * In this test we are restricting a file and testing that terms of accees + * or request access is required + * + * Export at the dataset level is always the public version. 
+ * + */ + @Test + public void testRestrictFileTermsOfUseAndAccess() throws IOException { + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String authorUsername = UtilIT.getUsernameFromResponse(createUser); + String authorApiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverse = UtilIT.createRandomDataverse(authorApiToken); + createDataverse.prettyPrint(); + createDataverse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, authorApiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId"); + + Path pathToFile = Paths.get(java.nio.file.Files.createTempDirectory(null) + File.separator + "data.csv"); + String contentOfCsv = "" + + "name,pounds,species\n" + + "Marshall,40,dog\n" + + "Tiger,17,cat\n" + + "Panther,21,cat\n"; + java.nio.file.Files.write(pathToFile, contentOfCsv.getBytes()); + + Response uploadFile = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile.toString(), authorApiToken); + uploadFile.prettyPrint(); + uploadFile.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.files[0].label", equalTo("data.csv")); + + String fileId = JsonPath.from(uploadFile.body().asString()).getString("data.files[0].dataFile.id"); + + assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); + + Response restrictFile = UtilIT.restrictFile(fileId, true, authorApiToken); + restrictFile.prettyPrint(); + restrictFile.then().assertThat().statusCode(OK.getStatusCode()); + + Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, authorApiToken); + publishDataverse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPid, "major", authorApiToken); + publishDataset.then().assertThat().statusCode(OK.getStatusCode()); + + + //not allowed to remove request access if there are retricted files + + Response disallowRequestAccess = UtilIT.allowAccessRequests(datasetPid, false, authorApiToken); + disallowRequestAccess.prettyPrint(); + disallowRequestAccess.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + + } + } From 8da1b43921f2e7d0ee1bd5c824fd5e10eb3e5370 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 10 Mar 2022 11:11:32 -0500 Subject: [PATCH 068/366] #8191 fix default values disable edits --- .../edu/harvard/iq/dataverse/DatasetPage.java | 3 ++- .../harvard/iq/dataverse/DatasetVersion.java | 1 + src/main/webapp/dataset.xhtml | 17 ++++++++++++++--- 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 2374c3841fd..24830722a5b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2056,7 +2056,8 @@ private String init(boolean initFull) { JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.message.label.fileAccess"), 
BundleUtil.getStringFromBundle("dataset.message.publicInstall")); } - + setFileAccessRequest(workingVersion.getTermsOfUseAndAccess().isFileAccessRequest()); + setTermsOfAccess(workingVersion.getTermsOfUseAndAccess().getTermsOfAccess()); resetVersionUI(); // FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, "Add New Dataset", " - Enter metadata to create the dataset's citation. You can add more metadata about this dataset after it's created.")); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index c2fe5dd646b..fc8f31c1aaa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -647,6 +647,7 @@ public void initDefaultValues(License license) { TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); terms.setDatasetVersion(this); terms.setLicense(license); + terms.setFileAccessRequest(true); this.setTermsOfUseAndAccess(terms); } diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 19a2b0204a0..a322cf1c3f9 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -370,14 +370,25 @@
    @@ -750,9 +749,9 @@ - + @@ -826,7 +825,7 @@ - +
    - - + @@ -1845,7 +1844,7 @@
    - + + bind_bsui_components(); @@ -108,9 +109,6 @@ } ; }); - - // Rebind bootstrap UI components - bind_bsui_components(); }); function clickSupportLink() { $("[id$='headerSupportLink']").click(); From 8f9e7ebc5a420d17f3308d9cd7e9884f9919500b Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 5 Apr 2022 14:11:22 -0400 Subject: [PATCH 154/366] don't close response in 200 case --- .../iq/dataverse/util/bagit/BagGenerator.java | 28 ++++++++++++++----- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java index 052e4ccf29b..94b4cc1a6c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.util.bagit; +import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.File; @@ -1005,23 +1006,34 @@ public InputStream get() { logger.fine("Get # " + tries + " for " + uriString); HttpGet getMap = createNewGetRequest(uri, null); logger.finest("Retrieving " + tries + ": " + uriString); - try (CloseableHttpResponse response = client.execute(getMap)) { + CloseableHttpResponse response = null; + try { + response = client.execute(getMap); // Note - if we ever need to pass an HttpClientContext, we need a new one per // thread. - int statusCode= response.getStatusLine().getStatusCode(); + int statusCode = response.getStatusLine().getStatusCode(); if (statusCode == 200) { logger.finest("Retrieved: " + uri); return response.getEntity().getContent(); } - logger.warning("Attempt: " + tries + " - Unexpected Status when retrieving " + uriString + " : " + statusCode); - if(statusCode < 500) { + logger.warning("Attempt: " + tries + " - Unexpected Status when retrieving " + uriString + + " : " + statusCode); + if (statusCode < 500) { logger.fine("Will not retry for 40x errors"); - tries +=5; + tries += 5; } else { tries++; } - //Shouldn't be needed - leaving until the Premature end of Content-Legnth delimited message body errors are resolved - //EntityUtils.consumeQuietly(response.getEntity()); + // Error handling + if (response != null) { + try { + EntityUtils.consumeQuietly(response.getEntity()); + response.close(); + } catch (IOException io) { + logger.warning( + "Exception closing response after status: " + statusCode + " on " + uri); + } + } } catch (ClientProtocolException e) { tries += 5; // TODO Auto-generated catch block @@ -1037,7 +1049,9 @@ public InputStream get() { } e.printStackTrace(); } + } + } catch (URISyntaxException e) { // TODO Auto-generated catch block e.printStackTrace(); From 14cec22f917e9503254888e89694213790bb7bf0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 5 Apr 2022 14:11:34 -0400 Subject: [PATCH 155/366] add version to datacite file --- .../engine/command/impl/DuraCloudSubmitToArchiveCommand.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java index 838238f4e9e..79c42853706 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java @@ -97,7 +97,7 @@ public WorkflowStepResult 
performArchiveSubmission(DatasetVersion dv, ApiToken t */ String spaceName = dataset.getOwner().getAlias().toLowerCase().replaceAll("[^a-z0-9-]", ".dcsafe"); String baseFileName = dataset.getGlobalId().asString().replace(':', '-').replace('/', '-') - .replace('.', '-').toLowerCase(); + .replace('.', '-').toLowerCase() + "v" + dv.getFriendlyVersionNumber(); ContentStore store; try { @@ -163,7 +163,7 @@ public void run() { // Store BagIt file success = false; - String fileName = baseFileName + "v" + dv.getFriendlyVersionNumber() + ".zip"; + String fileName = baseFileName + ".zip"; // Add BagIt ZIP file // Although DuraCloud uses SHA-256 internally, it's API uses MD5 to verify the From 345c97ae756a5eed6349c61a728b04d3b5b0a0d9 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 5 Apr 2022 14:31:09 -0400 Subject: [PATCH 156/366] add _ before version --- .../engine/command/impl/DuraCloudSubmitToArchiveCommand.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java index 79c42853706..b3b303d7407 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java @@ -97,7 +97,7 @@ public WorkflowStepResult performArchiveSubmission(DatasetVersion dv, ApiToken t */ String spaceName = dataset.getOwner().getAlias().toLowerCase().replaceAll("[^a-z0-9-]", ".dcsafe"); String baseFileName = dataset.getGlobalId().asString().replace(':', '-').replace('/', '-') - .replace('.', '-').toLowerCase() + "v" + dv.getFriendlyVersionNumber(); + .replace('.', '-').toLowerCase() + "_v" + dv.getFriendlyVersionNumber(); ContentStore store; try { From 1be42f5b49e409b0acf37a135f4fc0307c220dc4 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 5 Apr 2022 14:31:26 -0400 Subject: [PATCH 157/366] count success/fail correctly --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 07d7455c20e..719b4aeb1ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1830,7 +1830,7 @@ public void run() { AbstractSubmitToArchiveCommand cmd = ArchiverUtil.createSubmitToArchiveCommand(className, request, dv); dv = commandEngine.submit(cmd); - if (dv.getArchivalCopyLocation() != null) { + if (!dv.getArchivalCopyLocation().equals("Attempted")) { successes++; logger.info("DatasetVersion id=" + dv.getDataset().getGlobalId().toString() + " v" + dv.getFriendlyVersionNumber() + " submitted to Archive at: " + dv.getArchivalCopyLocation()); From 0934c5c19823cece6dce4cab6f1de8fb00414f44 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 5 Apr 2022 14:58:27 -0400 Subject: [PATCH 158/366] #7565 update widgets --- src/main/webapp/dataset-widgets.xhtml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/webapp/dataset-widgets.xhtml b/src/main/webapp/dataset-widgets.xhtml index 52a833eb70d..a57f144b97a 100644 --- a/src/main/webapp/dataset-widgets.xhtml +++ b/src/main/webapp/dataset-widgets.xhtml @@ -44,7 +44,7 @@ #{bundle['dataset.thumbnailsAndWidget.thumbnailImage.alt']} - +
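The BagGenerator change in PATCH 154 above keeps the HTTP response open on a 200 (the caller still needs the entity stream) and drains and closes it on every failed attempt before retrying, giving up early on 4xx errors. A self-contained sketch of that retry pattern with Apache HttpClient 4.x; the class, method, and variable names here are illustrative, not the actual Dataverse code:

    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.http.client.methods.CloseableHttpResponse;
    import org.apache.http.client.methods.HttpGet;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClients;
    import org.apache.http.util.EntityUtils;

    public class RetryingFetchSketch {

        /** Returns an open stream on HTTP 200; otherwise retries up to maxTries. */
        static InputStream fetchWithRetries(String uri, int maxTries) throws IOException {
            CloseableHttpClient client = HttpClients.createDefault(); // left open for brevity
            int tries = 0;
            while (tries < maxTries) {
                CloseableHttpResponse response = client.execute(new HttpGet(uri));
                int status = response.getStatusLine().getStatusCode();
                if (status == 200) {
                    // Success: hand the entity stream back WITHOUT closing the response,
                    // otherwise the caller reads a truncated body.
                    return response.getEntity().getContent();
                }
                // Failure: drain and close so the connection can be reused.
                EntityUtils.consumeQuietly(response.getEntity());
                response.close();
                // 4xx errors are not going to get better; exhaust the budget and stop.
                tries += (status < 500) ? maxTries : 1;
            }
            throw new IOException("Could not retrieve " + uri + " after " + maxTries + " attempts");
        }
    }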
    From a372fe420da01426574dbe9a4be3cc09629917fc Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 2 Feb 2022 12:07:13 -0500 Subject: [PATCH 159/366] Update 'report-datasets' jsonarray and docs for counter-proc. v0.1.04 --- doc/sphinx-guides/source/admin/make-data-count.rst | 6 +++--- .../source/developers/make-data-count.rst | 4 ++-- .../source/installation/prerequisites.rst | 10 +++++----- scripts/vagrant/setup-counter-processor.sh | 2 +- .../makedatacount/DatasetMetricsServiceBean.java | 7 ++++++- 5 files changed, 17 insertions(+), 12 deletions(-) diff --git a/doc/sphinx-guides/source/admin/make-data-count.rst b/doc/sphinx-guides/source/admin/make-data-count.rst index 8d3dbba5127..3f1b04c3c36 100644 --- a/doc/sphinx-guides/source/admin/make-data-count.rst +++ b/doc/sphinx-guides/source/admin/make-data-count.rst @@ -83,9 +83,9 @@ Configure Counter Processor * Change to the directory where you installed Counter Processor. - * ``cd /usr/local/counter-processor-0.0.1`` + * ``cd /usr/local/counter-processor-0.1.04`` -* Download :download:`counter-processor-config.yaml <../_static/admin/counter-processor-config.yaml>` to ``/usr/local/counter-processor-0.0.1``. +* Download :download:`counter-processor-config.yaml <../_static/admin/counter-processor-config.yaml>` to ``/usr/local/counter-processor-0.1.04``. * Edit the config file and pay particular attention to the FIXME lines. @@ -98,7 +98,7 @@ Soon we will be setting up a cron job to run nightly but we start with a single * Change to the directory where you installed Counter Processor. - * ``cd /usr/local/counter-processor-0.0.1`` + * ``cd /usr/local/counter-processor-0.1.04`` * If you are running Counter Processor for the first time in the middle of a month, you will need create blank log files for the previous days. e.g.: diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst index 88ca0007a8b..9c6095136b8 100644 --- a/doc/sphinx-guides/source/developers/make-data-count.rst +++ b/doc/sphinx-guides/source/developers/make-data-count.rst @@ -51,7 +51,7 @@ Once you are done with your configuration, you can run Counter Processor like th ``su - counter`` -``cd /usr/local/counter-processor-0.0.1`` +``cd /usr/local/counter-processor-0.1.04`` ``CONFIG_FILE=counter-processor-config.yaml python36 main.py`` @@ -84,7 +84,7 @@ Second, if you are also sending your SUSHI report to Make Data Count, you will n ``curl -H "Authorization: Bearer $JSON_WEB_TOKEN" -X DELETE https://$MDC_SERVER/reports/$REPORT_ID`` -To get the ``REPORT_ID``, look at the logs generated in ``/usr/local/counter-processor-0.0.1/tmp/datacite_response_body.txt`` +To get the ``REPORT_ID``, look at the logs generated in ``/usr/local/counter-processor-0.1.04/tmp/datacite_response_body.txt`` To read more about the Make Data Count api, see https://github.com/datacite/sashimi diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 85a5cfa4d8a..98756b08df7 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -96,7 +96,7 @@ PostgreSQL ---------- Installing PostgreSQL -======================= +===================== The application has been tested with PostgreSQL versions up to 13 and version 10+ is required. We recommend installing the latest version that is available for your OS distribution. 
*For example*, to install PostgreSQL 13 under RHEL7/derivative:: @@ -421,12 +421,12 @@ Counter Processor has only been tested on el7 (see "Linux" above). Please note t As root, download and install Counter Processor:: cd /usr/local - wget https://github.com/CDLUC3/counter-processor/archive/v0.0.1.tar.gz + wget https://github.com/CDLUC3/counter-processor/archive/v0.1.04.tar.gz tar xvfz v0.0.1.tar.gz As root, change to the Counter Processor directory you just created, download the GeoLite2-Country tarball, untar it, and copy the geoip database into place:: - cd /usr/local/counter-processor-0.0.1 + cd /usr/local/counter-processor-0.1.04 wget https://geolite.maxmind.com/download/geoip/database/GeoLite2-Country.tar.gz tar xvfz GeoLite2-Country.tar.gz cp GeoLite2-Country_*/GeoLite2-Country.mmdb maxmind_geoip @@ -434,7 +434,7 @@ As root, change to the Counter Processor directory you just created, download th As root, create a "counter" user and change ownership of Counter Processor directory to this new user:: useradd counter - chown -R counter:counter /usr/local/counter-processor-0.0.1 + chown -R counter:counter /usr/local/counter-processor-0.1.04 Installing Counter Processor Python Requirements ================================================ @@ -452,7 +452,7 @@ Install Python 3.6:: Install Counter Processor Python requirements:: python3.6 -m ensurepip - cd /usr/local/counter-processor-0.0.1 + cd /usr/local/counter-processor-0.1.04 pip3 install -r requirements.txt See the :doc:`/admin/make-data-count` section of the Admin Guide for how to configure and run Counter Processor. diff --git a/scripts/vagrant/setup-counter-processor.sh b/scripts/vagrant/setup-counter-processor.sh index 29d860208e5..659dc5163f4 100755 --- a/scripts/vagrant/setup-counter-processor.sh +++ b/scripts/vagrant/setup-counter-processor.sh @@ -11,7 +11,7 @@ python3.6 -m ensurepip COUNTER_USER=counter echo "Ensuring Unix user '$COUNTER_USER' exists" useradd $COUNTER_USER || : -COMMIT='a73dbced06f0ac2f0d85231e4d9dd4f21bee8487' +COMMIT='7974dad259465ba196ef639f48dea007cae8f9ac' UNZIPPED_DIR="counter-processor-$COMMIT" if [ ! 
-e $UNZIPPED_DIR ]; then ZIP_FILE="${COMMIT}.zip" diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java index 9c9dc24a17a..39afdf318ad 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java @@ -118,7 +118,12 @@ public List parseSushiReport(JsonObject report){ public List parseSushiReport(JsonObject report, Dataset dataset) { List datasetMetricsAll = new ArrayList<>(); - JsonArray reportDatasets = report.getJsonArray("report_datasets"); + //Current counter-processor v 0.1.04+ format + JsonArray reportDatasets = report.getJsonArray("report-datasets"); + if(reportDatasets==null) { + //Try counter-processor v 0.0.1 name + reportDatasets = report.getJsonArray("report_datasets"); + } for (JsonValue reportDataset : reportDatasets) { List datasetMetricsDataset = new ArrayList<>(); String globalId = null; From 4bc63843df81d9b7b2f2c4f81fa4796304574668 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 2 Feb 2022 12:28:25 -0500 Subject: [PATCH 160/366] #6629 update GeoLite db docs --- .../source/installation/prerequisites.rst | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 98756b08df7..92beebb245f 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -423,14 +423,24 @@ As root, download and install Counter Processor:: cd /usr/local wget https://github.com/CDLUC3/counter-processor/archive/v0.1.04.tar.gz tar xvfz v0.0.1.tar.gz + cd /usr/local/counter-processor-0.1.04 -As root, change to the Counter Processor directory you just created, download the GeoLite2-Country tarball, untar it, and copy the geoip database into place:: +Installing GeoLite Country Database +=================================== - cd /usr/local/counter-processor-0.1.04 - wget https://geolite.maxmind.com/download/geoip/database/GeoLite2-Country.tar.gz +Counter Processor can report per country results if the optional GeoLite Country Database is installed. At present, this database is free but to use it one nust signing an agreement (EULA) with MaxMind. +(The primary concern appears to be that individuals can opt-out of having their location tracked via IP address and, due to various privacy laws, MaxMind needs a way to comply with that for products it has 'sold' (for no cost in this case). Their agreement requires you to either configure automatic updates to the GeoLite Country database or be responsible on your own for managing take down notices.) +The process required to sign up, download the database, and to configure automated updating is described at https://blog.maxmind.com/2019/12/18/significant-changes-to-accessing-and-using-geolite2-databases/ and the links from that page. 
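The DatasetMetricsServiceBean change in PATCH 159 above reads the dataset array under the key written by counter-processor 0.1.04+ ("report-datasets") and falls back to the older 0.0.1 key ("report_datasets"). A standalone sketch of that fallback with javax.json; the class and method names here are illustrative:

    import java.io.StringReader;
    import javax.json.Json;
    import javax.json.JsonArray;
    import javax.json.JsonObject;

    public class SushiReportKeySketch {

        /** Prefer the 0.1.04+ key, fall back to the 0.0.1 key, else fail loudly. */
        static JsonArray reportDatasets(JsonObject report) {
            JsonArray datasets = report.getJsonArray("report-datasets");
            if (datasets == null) {
                datasets = report.getJsonArray("report_datasets");
            }
            if (datasets == null) {
                throw new IllegalArgumentException("SUSHI report contains no dataset array");
            }
            return datasets;
        }

        public static void main(String[] args) {
            JsonObject report = Json.createReader(
                    new StringReader("{\"report-datasets\": [{\"dataset-title\": \"example\"}]}"))
                    .readObject();
            System.out.println(reportDatasets(report).size()); // prints 1
        }
    }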
+ +As root, change to the Counter Processor directory you just created, download the GeoLite2-Country tarball from MaxMind, untar it, and copy the geoip database into place:: + + tar xvfz GeoLite2-Country.tar.gz cp GeoLite2-Country_*/GeoLite2-Country.mmdb maxmind_geoip +Creating a counter User +======================= + As root, create a "counter" user and change ownership of Counter Processor directory to this new user:: useradd counter From 7405b8c699f861f452c0a88d5277bf0d1d07ae77 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 14 Mar 2022 15:11:29 -0400 Subject: [PATCH 161/366] updates per review --- .../source/installation/prerequisites.rst | 6 +++--- scripts/vagrant/setup-counter-processor.sh | 14 +------------- 2 files changed, 4 insertions(+), 16 deletions(-) diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 92beebb245f..2fe2c7d1922 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -422,14 +422,14 @@ As root, download and install Counter Processor:: cd /usr/local wget https://github.com/CDLUC3/counter-processor/archive/v0.1.04.tar.gz - tar xvfz v0.0.1.tar.gz + tar xvfz v0.1.04.tar.gz cd /usr/local/counter-processor-0.1.04 Installing GeoLite Country Database =================================== -Counter Processor can report per country results if the optional GeoLite Country Database is installed. At present, this database is free but to use it one nust signing an agreement (EULA) with MaxMind. -(The primary concern appears to be that individuals can opt-out of having their location tracked via IP address and, due to various privacy laws, MaxMind needs a way to comply with that for products it has 'sold' (for no cost in this case). Their agreement requires you to either configure automatic updates to the GeoLite Country database or be responsible on your own for managing take down notices.) +Counter Processor can report per country results if the optional GeoLite Country Database is installed. At present, this database is free but to use it one must signing an agreement (EULA) with MaxMind. +(The primary concern appears to be that individuals can opt-out of having their location tracked via IP address and, due to various privacy laws, MaxMind needs a way to comply with that for products it has "sold" (for no cost in this case). Their agreement requires you to either configure automatic updates to the GeoLite Country database or be responsible on your own for managing take down notices.) The process required to sign up, download the database, and to configure automated updating is described at https://blog.maxmind.com/2019/12/18/significant-changes-to-accessing-and-using-geolite2-databases/ and the links from that page. As root, change to the Counter Processor directory you just created, download the GeoLite2-Country tarball from MaxMind, untar it, and copy the geoip database into place:: diff --git a/scripts/vagrant/setup-counter-processor.sh b/scripts/vagrant/setup-counter-processor.sh index 659dc5163f4..e28611f07c2 100755 --- a/scripts/vagrant/setup-counter-processor.sh +++ b/scripts/vagrant/setup-counter-processor.sh @@ -20,19 +20,7 @@ if [ ! -e $UNZIPPED_DIR ]; then unzip $ZIP_FILE fi cd $UNZIPPED_DIR -GEOIP_DIR='maxmind_geoip' -GEOIP_FILE='GeoLite2-Country.mmdb' -GEOIP_PATH_TO_FILE="$GEOIP_DIR/$GEOIP_FILE" -if [ ! 
-e $GEOIP_PATH_TO_FILE ]; then - echo "let's do this thing" - TARBALL='GeoLite2-Country.tar.gz' - wget https://geolite.maxmind.com/download/geoip/database/$TARBALL - tar xfz GeoLite2-Country.tar.gz - # Glob (*) below because of directories like "GeoLite2-Country_20181204". - GEOIP_UNTARRED_DIR='GeoLite2-Country_*' - mv $GEOIP_UNTARRED_DIR/$GEOIP_FILE $GEOIP_PATH_TO_FILE - rm -rf $TARBALL $GEOIP_UNTARRED_DIR -fi +echo Installation of the GeoLite2 country database for counter-processor can no longer be automated. See the Installation Guide for the manual installation process. pip3 install -r requirements.txt # For now, parsing sample_logs/counter_2018-05-08.log for i in `echo {00..31}`; do From 61d8933201cb89a0a5f960eaecc7af43c1603ec3 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Mon, 14 Mar 2022 15:15:18 -0400 Subject: [PATCH 162/366] add quotes in echo --- scripts/vagrant/setup-counter-processor.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/vagrant/setup-counter-processor.sh b/scripts/vagrant/setup-counter-processor.sh index e28611f07c2..a418e8d6251 100755 --- a/scripts/vagrant/setup-counter-processor.sh +++ b/scripts/vagrant/setup-counter-processor.sh @@ -20,7 +20,7 @@ if [ ! -e $UNZIPPED_DIR ]; then unzip $ZIP_FILE fi cd $UNZIPPED_DIR -echo Installation of the GeoLite2 country database for counter-processor can no longer be automated. See the Installation Guide for the manual installation process. +echo "Installation of the GeoLite2 country database for counter-processor can no longer be automated. See the Installation Guide for the manual installation process." pip3 install -r requirements.txt # For now, parsing sample_logs/counter_2018-05-08.log for i in `echo {00..31}`; do From 3079af5ce43a4e67d8e25b32e8326e5b29fd17a4 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 6 Apr 2022 15:01:06 -0400 Subject: [PATCH 163/366] #8191 add warning for TOA out of compliance --- src/main/java/propertyFiles/Bundle.properties | 1 + src/main/webapp/dataset-license-terms.xhtml | 7 ++++++- src/main/webapp/file-edit-popup-fragment.xhtml | 9 ++++++--- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 097d01a895c..f19fa0a9b21 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1788,6 +1788,7 @@ file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Request Access file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=If checked, users can request access to the restricted files in this dataset. file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request=Users may request access to files. file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest=Users may not request access to files. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.warning.outofcompliance=You must enable request access or add terms of access to restrict file access. file.dataFilesTab.terms.list.termsOfAccess.embargoed=Files are unavailable during the specified embargo. file.dataFilesTab.terms.list.termsOfAccess.embargoedthenrestricted=Files are unavailable during the specified embargo and restricted after that. 
 file.dataFilesTab.terms.list.termsOfAccess.requestAccess.enableBtn=Enable access request
diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml
index a3bf646c712..2800917dffe 100644
--- a/src/main/webapp/dataset-license-terms.xhtml
+++ b/src/main/webapp/dataset-license-terms.xhtml
@@ -293,7 +293,6 @@
    - #{!datasetPage.hasValidTermsOfAccess}
    @@ -331,6 +330,12 @@
    + + +
    + #{bundle['file.dataFilesTab.terms.list.termsOfAccess.requestAccess.warning.outofcompliance']} +
    +
    + + +
    + #{bundle['file.dataFilesTab.terms.list.termsOfAccess.requestAccess.warning.outofcompliance']} +
    +
    -
    - #{termsOfUseAndAccess.validationMessage} -
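PATCH 163 above surfaces in the UI the same rule the validator enforces on save: a dataset with restricted files must either allow access requests or provide non-empty Terms of Access. A compact, hypothetical helper expressing that check (not the actual page bean):

    public class TermsComplianceSketch {

        /**
         * Mirrors the "out of compliance" warning: restricted files require either
         * request access or terms of access. Unrestricted datasets always pass.
         */
        static boolean hasValidTermsOfAccess(boolean hasRestrictedFiles,
                                             boolean fileAccessRequestAllowed,
                                             String termsOfAccess) {
            if (!hasRestrictedFiles) {
                return true;
            }
            boolean hasTerms = termsOfAccess != null && !termsOfAccess.trim().isEmpty();
            return fileAccessRequestAllowed || hasTerms;
        }

        public static void main(String[] args) {
            System.out.println(hasValidTermsOfAccess(true, false, ""));         // false -> show warning
            System.out.println(hasValidTermsOfAccess(true, true, null));        // true
            System.out.println(hasValidTermsOfAccess(true, false, "Email us")); // true
        }
    }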
Date: Wed, 6 Apr 2022 15:23:18 -0400
Subject: [PATCH 164/366] #7565 remove inadvertent space

---
 src/main/webapp/dataset.xhtml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 7e4f579f764..f7f12ef761e 100644
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml
@@ -1844,7 +1844,7 @@
    - + From 25dc6813f2badcccf44f9bd23646d25db505c485 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 7 Apr 2022 17:09:04 -0400 Subject: [PATCH 171/366] autoset emailconfirmed timestamp for Shib users, rm check for stale tokens #5663 For Shib users we now set the emailconfirmed timestamp on login. (The guides say we do this already but are wrong. It was only being set on account creation.) For Shib users, I also prevent "check for your welcome email to verify your address" from being shown in the in-app welcome/new account notification. I put in a check to make sure Shib users never get a "verify your email address" email notification. Finally, I removed the hasNoStaleVerificationTokens check from the hasVerifiedEmail method. We've never worried about if there are stale verification tokens in the database or not and this check was preventing "Verified" from being shown, even when the user has a timestamp (the timestamp being the way we know if an email is verified or not). --- doc/release-notes/5663-shib-confirm-email.md | 7 +++++++ .../source/admin/user-administration.rst | 2 +- .../AuthenticationServiceBean.java | 4 ++-- .../shib/ShibAuthenticationProvider.java | 4 ++++ .../authorization/users/AuthenticatedUser.java | 9 +++++++++ .../confirmemail/ConfirmEmailServiceBean.java | 17 ++++++++++++++--- src/main/java/propertyFiles/Bundle.properties | 3 ++- src/main/webapp/dataverseuser.xhtml | 2 ++ .../iq/dataverse/branding/BrandingUtilTest.java | 3 ++- 9 files changed, 43 insertions(+), 8 deletions(-) create mode 100644 doc/release-notes/5663-shib-confirm-email.md diff --git a/doc/release-notes/5663-shib-confirm-email.md b/doc/release-notes/5663-shib-confirm-email.md new file mode 100644 index 00000000000..b6ef4306d4b --- /dev/null +++ b/doc/release-notes/5663-shib-confirm-email.md @@ -0,0 +1,7 @@ +For Shib users we now set the emailconfirmed timestamp on login. (The guides say we do this already but are wrong. It was only being set on account creation.) + +For Shib users, I also prevent "check for your welcome email to verify your address" from being shown in the in-app welcome/new account notification. + +I put in a check to make sure Shib users never get a "verify your email address" email notification. + +Finally, I removed the hasNoStaleVerificationTokens check from the hasVerifiedEmail method. We've never worried about if there are stale verification tokens in the database or not and this check was preventing "Verified" from being shown, even when the user has a timestamp (the timestamp being the way we know if an email is verified or not). diff --git a/doc/sphinx-guides/source/admin/user-administration.rst b/doc/sphinx-guides/source/admin/user-administration.rst index 867f06bde8e..df9a9f61aaa 100644 --- a/doc/sphinx-guides/source/admin/user-administration.rst +++ b/doc/sphinx-guides/source/admin/user-administration.rst @@ -63,7 +63,7 @@ The app will send a standard welcome email with a URL the user can click, which, Should users' URL token expire, they will see a "Verify Email" button on the account information page to send another URL. -Sysadmins can determine which users have verified their email addresses by looking for the presence of the value ``emailLastConfirmed`` in the JSON output from listing users (see :ref:`admin` section of Native API in the API Guide). As mentioned in the :doc:`/user/account` section of the User Guide, the email addresses for Shibboleth users are re-confirmed on every login. 
+Sysadmins can determine which users have verified their email addresses by looking for the presence of the value ``emailLastConfirmed`` in the JSON output from listing users (see :ref:`admin` section of Native API in the API Guide). As mentioned in the :doc:`/user/account` section of the User Guide, the email addresses for Shibboleth users are re-confirmed on every login (so their welcome email does not contain a URL to click for this purpose). Deleting an API Token --------------------- diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java index dd4b5430bd1..b242cd2936f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java @@ -636,9 +636,8 @@ public AuthenticatedUser createAuthenticatedUser(UserRecordIdentifier userRecord authenticatedUser.setAuthenticatedUserLookup(auusLookup); if (ShibAuthenticationProvider.PROVIDER_ID.equals(auusLookup.getAuthenticationProviderId())) { - Timestamp emailConfirmedNow = new Timestamp(new Date().getTime()); // Email addresses for Shib users are confirmed by the Identity Provider. - authenticatedUser.setEmailConfirmed(emailConfirmedNow); + authenticatedUser.updateEmailConfirmedToNow(); authenticatedUser = save(authenticatedUser); } else { /* @todo Rather than creating a token directly here it might be @@ -665,6 +664,7 @@ public boolean identifierExists( String idtf ) { public AuthenticatedUser updateAuthenticatedUser(AuthenticatedUser user, AuthenticatedUserDisplayInfo userDisplayInfo) { user.applyDisplayInfo(userDisplayInfo); + user.updateEmailConfirmedToNow(); actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "updateUser") .setInfo(user.getIdentifier())); return update(user); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibAuthenticationProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibAuthenticationProvider.java index f7c00a1635d..e7dccc34300 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibAuthenticationProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibAuthenticationProvider.java @@ -33,4 +33,8 @@ public boolean isDisplayIdentifier() { return false; } + // We don't override "isEmailVerified" because we're using timestamps + // ("emailconfirmed" on the "authenticateduser" table) to know if + // Shib users have confirmed/verified their email or not. 
+ } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java index 9d76ce0e47c..2a7fc8194d3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java @@ -10,11 +10,13 @@ import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2TokenData; import edu.harvard.iq.dataverse.userdata.UserUtil; import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.OrcidOAuth2AP; +import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider; import edu.harvard.iq.dataverse.util.BundleUtil; import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import java.io.Serializable; import java.sql.Timestamp; +import java.util.Date; import java.util.List; import java.util.Objects; import javax.json.Json; @@ -193,6 +195,13 @@ public void applyDisplayInfo( AuthenticatedUserDisplayInfo inf ) { } } + // For Shib users, set "email confirmed" timestamp on login. + public void updateEmailConfirmedToNow() { + if (ShibAuthenticationProvider.PROVIDER_ID.equals(this.getAuthenticatedUserLookup().getAuthenticationProviderId())) { + Timestamp emailConfirmedNow = new Timestamp(new Date().getTime()); + this.setEmailConfirmed(emailConfirmedNow); + } + } //For User List Admin dashboard @Transient diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java index e8748f1e158..5fdf40d3833 100644 --- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java @@ -54,10 +54,16 @@ public class ConfirmEmailServiceBean { */ public boolean hasVerifiedEmail(AuthenticatedUser user) { boolean hasTimestamp = user.getEmailConfirmed() != null; - boolean hasNoStaleVerificationTokens = this.findSingleConfirmEmailDataByUser(user) == null; boolean isVerifiedByAuthProvider = authenticationService.lookupProvider(user).isEmailVerified(); - - return (hasTimestamp && hasNoStaleVerificationTokens) || isVerifiedByAuthProvider; + // Note: In practice, we are relying on hasTimestamp to know if an email + // has been confirmed/verified or not. We have switched the Shib code to automatically + // overwrite the "confirm email" timestamp on login. So hasTimeStamp will be enough. + // If we ever want to get away from using "confirmed email" timestamps for Shib users + // we can make use of the isVerifiedByAuthProvider boolean. Currently, + // isVerifiedByAuthProvider is set to false in the super class and nothing + // is overridden in the shib auth provider (or any auth provider) but we could override + // isVerifiedByAuthProvider in the Shib auth provider and have it return true. 
+ return hasTimestamp || isVerifiedByAuthProvider; } /** @@ -128,6 +134,11 @@ private void sendLinkOnEmailChange(AuthenticatedUser aUser, String confirmationU userNotification.setType(UserNotification.Type.CONFIRMEMAIL); String subject = MailUtil.getSubjectTextBasedOnNotification(userNotification, null); logger.fine("sending email to " + toAddress + " with this subject: " + subject); + if (ShibAuthenticationProvider.PROVIDER_ID.equals(aUser.getAuthenticatedUserLookup().getAuthenticationProviderId())) { + // Shib users have "emailconfirmed" timestamp set on login. + logger.info("Returning early to prevent an email confirmation link from being sent to Shib user " + aUser.getUserIdentifier() + "."); + return; + } mailService.sendSystemEmail(toAddress, subject, messageBody); } catch (Exception ex) { /** diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index f8c2e5f97ec..8bcf6960b2d 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -198,7 +198,8 @@ wasReturnedByReviewer=, was returned by the curator of # TODO: Confirm that "toReview" can be deleted. toReview=Don't forget to publish it or send it back to the contributor! # Bundle file editors, please note that "notification.welcome" is used in a unit test. -notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. Also, check for your welcome email to verify your address. +notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. +notification.welcomeConfirmEmail=Also, check for your welcome email to verify your address. notification.demoSite=Demo Site notification.requestFileAccess=File access requested for dataset: {0} was made by {1} ({2}). notification.grantFileAccess=Access granted for files in dataset: {0}. diff --git a/src/main/webapp/dataverseuser.xhtml b/src/main/webapp/dataverseuser.xhtml index 7fe5c43054f..4a369c0d431 100644 --- a/src/main/webapp/dataverseuser.xhtml +++ b/src/main/webapp/dataverseuser.xhtml @@ -80,6 +80,8 @@ #{bundle['notification.demoSite']} + +
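Summarizing PATCH 171 above: Shib users get their emailconfirmed timestamp refreshed on login, and whether the email is verified is then decided by that timestamp. A simplified sketch with stand-in types; the provider id value is an assumption, the plain User class is illustrative, and the real logic lives in AuthenticatedUser and ConfirmEmailServiceBean (which additionally ORs in provider-level verification):

    import java.sql.Timestamp;
    import java.util.Date;

    public class ShibEmailConfirmationSketch {

        // Assumed value of ShibAuthenticationProvider.PROVIDER_ID.
        static final String SHIB_PROVIDER_ID = "shib";

        static class User {
            String authProviderId;
            Timestamp emailConfirmed;
        }

        /** On login/update: Shib identities are trusted, so stamp the confirmation time. */
        static void updateEmailConfirmedToNow(User user) {
            if (SHIB_PROVIDER_ID.equals(user.authProviderId)) {
                user.emailConfirmed = new Timestamp(new Date().getTime());
            }
        }

        /** After the patch, a non-null timestamp is enough for this simplified check. */
        static boolean hasVerifiedEmail(User user) {
            return user.emailConfirmed != null;
        }

        public static void main(String[] args) {
            User shibUser = new User();
            shibUser.authProviderId = SHIB_PROVIDER_ID;
            updateEmailConfirmedToNow(shibUser);
            System.out.println(hasVerifiedEmail(shibUser)); // true
        }
    }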
    diff --git a/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java index b3b80397eee..95deafc0cfe 100644 --- a/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java @@ -141,7 +141,8 @@ public void testWelcomeInAppNotification(TestInfo testInfo) { "LibraScholar", "User Guide", "Demo Site" - )); + )) + + " " + BundleUtil.getStringFromBundle("notification.welcomeConfirmEmail"); log.fine("message: " + message); assertEquals("Welcome to LibraScholar! Get started by adding or finding data. " + "Have questions? Check out the User Guide." From 2bb334f22e94b3c2eb150b554935ebbff2daf0b9 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Fri, 8 Apr 2022 14:40:50 -0400 Subject: [PATCH 172/366] use namespace in parsing xml:lang --- .../harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index b3bbb3b613c..839848f3e71 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -190,7 +190,7 @@ private void processDDI(ImportType importType, XMLStreamReader xmlr, DatasetDTO String codeBookLevelId = xmlr.getAttributeValue(null, "ID"); //Include metadataLanguage from an xml:lang attribute if present (null==undefined) - String metadataLanguage= xmlr.getAttributeValue(null, "xml:lang"); + String metadataLanguage= xmlr.getAttributeValue("xml", "lang"); datasetDTO.setMetadataLanguage(metadataLanguage); // (but first we will parse and process the entire DDI - and only From afc87b4466609ff03eb31da3aceb938401727c1c Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 8 Apr 2022 15:14:14 -0400 Subject: [PATCH 173/366] remove empty "Account Information" dropdown for Shib users #8223 --- src/main/webapp/dataverseuser.xhtml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/main/webapp/dataverseuser.xhtml b/src/main/webapp/dataverseuser.xhtml index 4a369c0d431..bbb7b7b0bc6 100644 --- a/src/main/webapp/dataverseuser.xhtml +++ b/src/main/webapp/dataverseuser.xhtml @@ -381,15 +381,12 @@ -
    +
    - - -
    + +
    #{bundle['file.dataFilesTab.terms.list.termsOfAccess.requestAccess.warning.outofcompliance']}
    diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index bee27e7cd6b..774ec1b865a 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -762,12 +762,13 @@ + + + + diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml index fafcf85e7f9..6a32d581e8e 100644 --- a/src/main/webapp/file-edit-popup-fragment.xhtml +++ b/src/main/webapp/file-edit-popup-fragment.xhtml @@ -76,9 +76,9 @@
    + + From 47c1413f9290088dc6d0dfd11096ae8a57b599a6 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 12 Apr 2022 10:00:52 -0400 Subject: [PATCH 198/366] link to tabular ingest docs #8525 --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index c607013cf29..816a2ae3988 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1301,7 +1301,7 @@ When adding a file to a dataset, you can optionally specify the following: - A description of the file. - The "File Path" of the file, indicating which folder the file should be uploaded to within the dataset. - Whether or not the file is restricted. -- Whether or not the file skips tabular ingest. If tabIngest parameter is not specified then it defaults to ``true``. +- Whether or not the file skips :doc:`tabular ingest `. If the ``tabIngest`` parameter is not specified, it defaults to ``true``. Note that when a Dataverse instance is configured to use S3 storage with direct upload enabled, there is API support to send a file directly to S3. This is more complex and is described in the :doc:`/developers/s3-direct-upload-api` guide. From 66b774bc2d16308be16158b0da883344131f7db5 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 12 Apr 2022 10:01:36 -0400 Subject: [PATCH 199/366] add release note #8525 --- doc/release-notes/8525-ingest-optional-skip.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/8525-ingest-optional-skip.md diff --git a/doc/release-notes/8525-ingest-optional-skip.md b/doc/release-notes/8525-ingest-optional-skip.md new file mode 100644 index 00000000000..dfec1336ea3 --- /dev/null +++ b/doc/release-notes/8525-ingest-optional-skip.md @@ -0,0 +1 @@ +Tabular ingest can be skipped via API. 
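PATCH 198/199 above document the optional tabIngest flag. It travels inside the jsonData part of the add-file request described in the linked native API guide; a small sketch of building just that fragment with javax.json (the field names follow the API documentation, the surrounding class is illustrative):

    import javax.json.Json;
    import javax.json.JsonObject;

    public class AddFileJsonDataSketch {

        /** Builds the jsonData part for an add-file call that skips tabular ingest. */
        static JsonObject jsonDataSkippingIngest(String description, String directoryLabel) {
            return Json.createObjectBuilder()
                    .add("description", description)
                    .add("directoryLabel", directoryLabel)
                    .add("restrict", "false")
                    // defaults to true; "false" uploads the file without tabular ingest
                    .add("tabIngest", "false")
                    .build();
        }

        public static void main(String[] args) {
            System.out.println(jsonDataSkippingIngest("Sensor readings", "data/raw"));
            // {"description":"Sensor readings","directoryLabel":"data/raw","restrict":"false","tabIngest":"false"}
        }
    }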
From f55578ade95d7e6b2da4aa5fba361584b3b95624 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 12 Apr 2022 18:08:34 -0400 Subject: [PATCH 200/366] update existing schema and code to make all cvv files multival in solr --- conf/solr/8.11.1/schema.xml | 2 +- src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/conf/solr/8.11.1/schema.xml b/conf/solr/8.11.1/schema.xml index 68ffe567193..ead6e882d49 100644 --- a/conf/solr/8.11.1/schema.xml +++ b/conf/solr/8.11.1/schema.xml @@ -315,7 +315,7 @@ - + diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java index a092cdad784..d25d8428902 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java @@ -543,7 +543,7 @@ public SolrField getSolrField() { boolean makeSolrFieldMultivalued; // http://stackoverflow.com/questions/5800762/what-is-the-use-of-multivalued-field-type-in-solr - if (allowMultiples || parentAllowsMultiplesBoolean) { + if (allowMultiples || parentAllowsMultiplesBoolean || isControlledVocabulary()) { makeSolrFieldMultivalued = true; } else { makeSolrFieldMultivalued = false; From 1ee29e41194d0f1da9e1c504639f883053f775a0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 12 Apr 2022 18:25:24 -0400 Subject: [PATCH 201/366] release note --- doc/release-notes/8595-cvv-field-solr-update.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/8595-cvv-field-solr-update.md diff --git a/doc/release-notes/8595-cvv-field-solr-update.md b/doc/release-notes/8595-cvv-field-solr-update.md new file mode 100644 index 00000000000..68f6f555497 --- /dev/null +++ b/doc/release-notes/8595-cvv-field-solr-update.md @@ -0,0 +1,3 @@ +Controlled vocabulary fields that do not allow multiple entries are not indexed properly in Dataverse instances configured to support multiple languages. This release fixes the schema.xml file for the one field affected in the standard metadata blocks (journalArticleType) and updates the api/admin/index/solr/schema to provide the correct information for use with the update-fields.sh script described in the [Metadata Customization section of the Admin Guide](https://guides.dataverse.org/en/latest/admin/metadatacustomization.html#updating-the-solr-schema). + +The release should include updating the schema.xml file for solr or running the update-fields.sh script and reindexing (whatever standard instructions we give for schema changes.) \ No newline at end of file From 3dfb0c316227708856f755ab3e5603134e5d6a36 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 12 Apr 2022 19:23:42 -0400 Subject: [PATCH 202/366] document setting/function --- .../source/developers/workflows.rst | 5 +++-- doc/sphinx-guides/source/installation/config.rst | 16 +++++++++++----- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/doc/sphinx-guides/source/developers/workflows.rst b/doc/sphinx-guides/source/developers/workflows.rst index c982edc08bb..c3ad039271f 100644 --- a/doc/sphinx-guides/source/developers/workflows.rst +++ b/doc/sphinx-guides/source/developers/workflows.rst @@ -180,7 +180,7 @@ archiver A step that sends an archival copy of a Dataset Version to a configured archiver, e.g. the DuraCloud interface of Chronopolis. See the `DuraCloud/Chronopolis Integration documentation `_ for further detail. 
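On PATCH 200 above: with multiple languages configured, even a controlled-vocabulary field that allows only one entry can end up indexed under more than one string (the stored value plus localized labels), which is why the Solr field has to be multiValued. A simplified, assumed illustration of that effect — representative logic only, not the actual indexing code:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Locale;
    import java.util.Map;

    public class ControlledVocabularyIndexingSketch {

        // Representative translations for a single-valued field such as journalArticleType.
        static final Map<Locale, String> LABELS = Map.of(
                Locale.ENGLISH, "research article",
                Locale.FRENCH, "article de recherche");

        /** One selected term still produces several Solr values on a multilingual site. */
        static List<String> solrValuesFor(String canonicalValue, List<Locale> configuredLanguages) {
            List<String> values = new ArrayList<>();
            values.add(canonicalValue);
            for (Locale locale : configuredLanguages) {
                values.add(LABELS.getOrDefault(locale, canonicalValue));
            }
            // Writing more than one value into a Solr field requires multiValued="true"
            // in schema.xml, even though the metadata field itself allows a single entry.
            return values;
        }

        public static void main(String[] args) {
            System.out.println(solrValuesFor("research article", List.of(Locale.FRENCH)));
            // [research article, article de recherche]
        }
    }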
-Note - the example step includes two settings required for any archiver and three (DuraCloud*) that are specific to DuraCloud. +Note - the example step includes two settings required for any archiver, three (DuraCloud*) that are specific to DuraCloud, and the optional BagGeneratorThreads setting that controls parallelism when creating the Bag. .. code:: json @@ -196,7 +196,8 @@ Note - the example step includes two settings required for any archiver and thre ":ArchiverSettings": "string", ":DuraCloudHost":"string", ":DuraCloudPort":"string", - ":DuraCloudContext":"string" + ":DuraCloudContext":"string", + ":BagGeneratorThreads" } } diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 7ed9fe1327d..2f098c25e36 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -932,7 +932,7 @@ The minimal configuration to support an archiver integration involves adding a m \:ArchiverSettings - the archiver class can access required settings including existing Dataverse installation settings and dynamically defined ones specific to the class. This setting is a comma-separated list of those settings. For example\: -``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":DuraCloudHost, :DuraCloudPort, :DuraCloudContext"`` +``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":DuraCloudHost, :DuraCloudPort, :DuraCloudContext, :BagGeneratorThreads"`` The DPN archiver defines three custom settings, one of which is required (the others have defaults): @@ -942,6 +942,12 @@ The DPN archiver defines three custom settings, one of which is required (the ot :DuraCloudPort and :DuraCloudContext are also defined if you are not using the defaults ("443" and "duracloud" respectively). (Note\: these settings are only in effect if they are listed in the \:ArchiverSettings. Otherwise, they will not be passed to the DuraCloud Archiver class.) +It also can use one setting that is common to all Archivers: :BagGeneratorThreads + +``curl http://localhost:8080/api/admin/settings/:BagGenerator -X PUT -d '8'`` + +By default, the Bag generator zips two datafiles at a time when creating the Bag. This setting can be used to lower that to 1, i.e. to decrease system load, or to increase it, e.g. to 4 or 8, to speed processing of many small files. + Archivers may require JVM options as well. For the Chronopolis archiver, the username and password associated with your organization's Chronopolis/DuraCloud account should be configured in Payara: ``./asadmin create-jvm-options '-Dduracloud.username=YOUR_USERNAME_HERE'`` @@ -963,9 +969,9 @@ ArchiverClassName - the fully qualified class to be used for archiving. For exam \:ArchiverSettings - the archiver class can access required settings including existing Dataverse installation settings and dynamically defined ones specific to the class. This setting is a comma-separated list of those settings. For example\: -``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":BagItLocalPath"`` +``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":BagItLocalPath, , :BagGeneratorThreads"`` -:BagItLocalPath is the file path that you've set in :ArchiverSettings. +:BagItLocalPath is the file path that you've set in :ArchiverSettings. See the DuraCloud archiver section for a description of :BagGeneratorThreads. .. 
_Google Cloud Configuration: @@ -976,9 +982,9 @@ The Google Cloud Archiver can send Dataverse Project Bags to a bucket in Google' ``curl http://localhost:8080/api/admin/settings/:ArchiverClassName -X PUT -d "edu.harvard.iq.dataverse.engine.command.impl.GoogleCloudSubmitToArchiveCommand"`` -``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":GoogleCloudBucket, :GoogleCloudProject"`` +``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":GoogleCloudBucket, :GoogleCloudProject, :BagGeneratorThreads"`` -The Google Cloud Archiver defines two custom settings, both are required. The credentials for your account, in the form of a json key file, must also be obtained and stored locally (see below): +The Google Cloud Archiver defines two custom settings, both are required. It can also use the :BagGeneratorThreads setting as described in the DuraCloud Archiver section above. The credentials for your account, in the form of a json key file, must also be obtained and stored locally (see below): In order to use the Google Cloud Archiver, you must have a Google account. You will need to create a project and bucket within that account and provide those values in the settings: From a9f3aa50a1b65b1be524c54fc9015916633bbe7d Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 13 Apr 2022 16:32:13 -0400 Subject: [PATCH 203/366] remove thread config code --- .../edu/harvard/iq/dataverse/util/bagit/BagGenerator.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java index 08a46f523c2..3b0b9f27ea4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java @@ -116,7 +116,6 @@ public class BagGenerator { private boolean usetemp = false; private int numConnections = 8; - public static final String BAG_GENERATOR_THREADS = ":BagGeneratorThreads"; private OREMap oremap; @@ -1107,8 +1106,4 @@ public void setAuthenticationKey(String tokenString) { apiKey = tokenString; } - public void setNumConnections(int numConnections) { - this.numConnections = numConnections; - } - } \ No newline at end of file From cd7602b5274ac8d23eff669fbf86ca4eb45a7c8d Mon Sep 17 00:00:00 2001 From: qqmyers Date: Wed, 13 Apr 2022 18:23:34 -0400 Subject: [PATCH 204/366] documentation update per review --- doc/sphinx-guides/source/admin/integrations.rst | 12 +++++++----- .../source/developers/workflows.rst | 4 ++-- .../source/installation/config.rst | 17 ++++++++++++----- 3 files changed, 21 insertions(+), 12 deletions(-) diff --git a/doc/sphinx-guides/source/admin/integrations.rst b/doc/sphinx-guides/source/admin/integrations.rst index 5ee6372d56d..e748ec13057 100644 --- a/doc/sphinx-guides/source/admin/integrations.rst +++ b/doc/sphinx-guides/source/admin/integrations.rst @@ -12,7 +12,7 @@ Getting Data In A variety of integrations are oriented toward making it easier for your researchers to deposit data into your Dataverse installation. GitHub -+++++++ +++++++ Dataverse integration with GitHub is implemented via a Dataverse Uploader GitHub Action. It is a reusable, composite workflow for uploading a git repository or subdirectory into a dataset on a target Dataverse installation. The action is customizable, allowing users to choose to replace a dataset, add to the dataset, publish it or leave it as a draft version on Dataverse. 
The action provides some metadata to the dataset, such as the origin GitHub repository, and it preserves the directory tree structure. @@ -157,12 +157,14 @@ Archivematica Sponsored by the `Ontario Council of University Libraries (OCUL) `_, this technical integration enables users of Archivematica to select datasets from connected Dataverse installations and process them for long-term access and digital preservation. For more information and list of known issues, please refer to Artefactual's `release notes `_, `integration documentation `_, and the `project wiki `_. -DuraCloud/Chronopolis -+++++++++++++++++++++ +.. _rda-bagit-archiving: + +RDA BagIt (BagPack) Archiving ++++++++++++++++++++++++++++++ -A Dataverse installation can be configured to submit a copy of published Datasets, packaged as `Research Data Alliance conformant `_ zipped `BagIt `_ bags to the `Chronopolis `_ via `DuraCloud `_ +A Dataverse installation can be configured to submit a copy of published Datasets, packaged as `Research Data Alliance conformant `_ zipped `BagIt `_ bags to the `Chronopolis `_ via `DuraCloud `_, to a local file system, or to `Google Cloud Storage`_. -For details on how to configure this integration, look for "DuraCloud/Chronopolis" in the :doc:`/installation/config` section of the Installation Guide. +For details on how to configure this integration, see :ref:`:BagIt Export` in the :doc:`/installation/config` section of the Installation Guide. Future Integrations ------------------- diff --git a/doc/sphinx-guides/source/developers/workflows.rst b/doc/sphinx-guides/source/developers/workflows.rst index c3ad039271f..5efdb3a5370 100644 --- a/doc/sphinx-guides/source/developers/workflows.rst +++ b/doc/sphinx-guides/source/developers/workflows.rst @@ -178,7 +178,7 @@ Available variables are: archiver ++++++++ -A step that sends an archival copy of a Dataset Version to a configured archiver, e.g. the DuraCloud interface of Chronopolis. See the `DuraCloud/Chronopolis Integration documentation `_ for further detail. +A step that sends an archival copy of a Dataset Version to a configured archiver, e.g. the DuraCloud interface of Chronopolis. See the :ref:`rda-bagit-archiving` for further detail. Note - the example step includes two settings required for any archiver, three (DuraCloud*) that are specific to DuraCloud, and the optional BagGeneratorThreads setting that controls parallelism when creating the Bag. @@ -197,7 +197,7 @@ Note - the example step includes two settings required for any archiver, three ( ":DuraCloudHost":"string", ":DuraCloudPort":"string", ":DuraCloudContext":"string", - ":BagGeneratorThreads" + ":BagGeneratorThreads":"string" } } diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 2f098c25e36..f890f5312ff 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -944,7 +944,7 @@ The DPN archiver defines three custom settings, one of which is required (the ot It also can use one setting that is common to all Archivers: :BagGeneratorThreads -``curl http://localhost:8080/api/admin/settings/:BagGenerator -X PUT -d '8'`` +``curl http://localhost:8080/api/admin/settings/:BagGeneratorThreads -X PUT -d '8'`` By default, the Bag generator zips two datafiles at a time when creating the Bag. This setting can be used to lower that to 1, i.e. to decrease system load, or to increase it, e.g. to 4 or 8, to speed processing of many small files. 
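As a quick sketch (assuming the admin settings API is reachable on localhost as in the examples above, and that a DELETE on a database setting clears it so the built-in default of 2 threads applies again), tuning and then reverting the value looks like:

.. code:: bash

   # raise the Bag generator to 8 zip threads
   curl http://localhost:8080/api/admin/settings/:BagGeneratorThreads -X PUT -d '8'

   # list all database settings to confirm the value that will be used
   curl http://localhost:8080/api/admin/settings

   # remove the setting to fall back to the default of 2 threads
   curl http://localhost:8080/api/admin/settings/:BagGeneratorThreads -X DELETE
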
@@ -969,9 +969,9 @@ ArchiverClassName - the fully qualified class to be used for archiving. For exam \:ArchiverSettings - the archiver class can access required settings including existing Dataverse installation settings and dynamically defined ones specific to the class. This setting is a comma-separated list of those settings. For example\: -``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":BagItLocalPath, , :BagGeneratorThreads"`` +``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":BagItLocalPath, :BagGeneratorThreads"`` -:BagItLocalPath is the file path that you've set in :ArchiverSettings. See the DuraCloud archiver section for a description of :BagGeneratorThreads. +:BagItLocalPath is the file path that you've set in :ArchiverSettings. See the DuraCloud Configuration section for a description of :BagGeneratorThreads. .. _Google Cloud Configuration: @@ -984,7 +984,7 @@ The Google Cloud Archiver can send Dataverse Project Bags to a bucket in Google' ``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":GoogleCloudBucket, :GoogleCloudProject, :BagGeneratorThreads"`` -The Google Cloud Archiver defines two custom settings, both are required. It can also use the :BagGeneratorThreads setting as described in the DuraCloud Archiver section above. The credentials for your account, in the form of a json key file, must also be obtained and stored locally (see below): +The Google Cloud Archiver defines two custom settings, both are required. It can also use the :BagGeneratorThreads setting as described in the DuraCloud Configuration section above. The credentials for your account, in the form of a json key file, must also be obtained and stored locally (see below): In order to use the Google Cloud Archiver, you must have a Google account. You will need to create a project and bucket within that account and provide those values in the settings: @@ -2406,6 +2406,13 @@ For example, the LocalSubmitToArchiveCommand only uses the :BagItLocalPath setti ``curl -X PUT -d ':BagItLocalPath' http://localhost:8080/api/admin/settings/:ArchiverSettings`` +:BagGeneratorThreads +++++++++++++++++++++ + +An archiver setting shared by several implementations (e.g. DuraCloud, Google, and Local) that can make Bag generation use fewer or more threads in zipping datafiles that the default of 2 + +``curl http://localhost:8080/api/admin/settings/:BagGeneratorThreads -X PUT -d '8'`` + :DuraCloudHost ++++++++++++++ :DuraCloudPort @@ -2421,7 +2428,7 @@ These three settings define the host, port, and context used by the DuraCloudSub This is the local file system path to be used with the LocalSubmitToArchiveCommand class. It is recommended to use an absolute path. See the :ref:`Local Path Configuration` section above. 
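Taken together, a minimal local-filesystem archiver configuration might look like the sketch below. The fully qualified class name and the output directory are illustrative assumptions: the class name mirrors the package used by the DuraCloud and Google Cloud commands shown earlier, and the path can be any absolute directory writable by the application server.

.. code:: bash

   # assumed class name, following the package of the other archiver commands
   curl http://localhost:8080/api/admin/settings/:ArchiverClassName -X PUT -d "edu.harvard.iq.dataverse.engine.command.impl.LocalSubmitToArchiveCommand"

   # settings the archiver class is allowed to read
   curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":BagItLocalPath, :BagGeneratorThreads"

   # example output directory for the zipped Bags (use any absolute, writable path)
   curl http://localhost:8080/api/admin/settings/:BagItLocalPath -X PUT -d "/usr/local/payara5/glassfish/domains/domain1/files/archive"
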
:GoogleCloudBucket -++++++++++++++++++ +++++++++++++++++++ :GoogleCloudProject +++++++++++++++++++ From 36aa64ccfcc7e33cf861011860ebc83d13f25dfc Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 14 Apr 2022 10:14:05 -0400 Subject: [PATCH 205/366] add required space --- doc/sphinx-guides/source/admin/integrations.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/admin/integrations.rst b/doc/sphinx-guides/source/admin/integrations.rst index e748ec13057..d958b2d77d6 100644 --- a/doc/sphinx-guides/source/admin/integrations.rst +++ b/doc/sphinx-guides/source/admin/integrations.rst @@ -162,7 +162,7 @@ Sponsored by the `Ontario Council of University Libraries (OCUL) `_ zipped `BagIt `_ bags to the `Chronopolis `_ via `DuraCloud `_, to a local file system, or to `Google Cloud Storage`_. +A Dataverse installation can be configured to submit a copy of published Datasets, packaged as `Research Data Alliance conformant `_ zipped `BagIt `_ bags to the `Chronopolis `_ via `DuraCloud `_, to a local file system, or to `Google Cloud Storage `_. For details on how to configure this integration, see :ref:`:BagIt Export` in the :doc:`/installation/config` section of the Installation Guide. From c3596b8077e6f94cb32593ecee754c51bc3299e0 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 14 Apr 2022 10:37:52 -0400 Subject: [PATCH 206/366] updating docs/release notes --- doc/release-notes/8380-counter-processor-update.md | 1 + doc/sphinx-guides/source/_static/util/counter_daily.sh | 2 +- doc/sphinx-guides/source/installation/prerequisites.rst | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 doc/release-notes/8380-counter-processor-update.md diff --git a/doc/release-notes/8380-counter-processor-update.md b/doc/release-notes/8380-counter-processor-update.md new file mode 100644 index 00000000000..f050bae0f0d --- /dev/null +++ b/doc/release-notes/8380-counter-processor-update.md @@ -0,0 +1 @@ +This release includes support for counter-processor-0.1.04 for processing Make Data Count metrics. If you are running Make Data Counts support, you should reinstall/reconfigure counter-processor as described in the latest Guides. diff --git a/doc/sphinx-guides/source/_static/util/counter_daily.sh b/doc/sphinx-guides/source/_static/util/counter_daily.sh index 597ff0ac737..a12439d9cf8 100644 --- a/doc/sphinx-guides/source/_static/util/counter_daily.sh +++ b/doc/sphinx-guides/source/_static/util/counter_daily.sh @@ -1,6 +1,6 @@ #! /bin/bash -COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-0.0.1" +COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-0.1.04" MDC_LOG_DIRECTORY="/usr/local/payara5/glassfish/domains/domain1/logs/mdc" # counter_daily.sh diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 2fe2c7d1922..fd92cb0b855 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -449,7 +449,7 @@ As root, create a "counter" user and change ownership of Counter Processor direc Installing Counter Processor Python Requirements ================================================ -Counter Processor requires Python 3.6.4 or higher. The following commands are intended to be run as root but we are aware that Pythonistas might prefer fancy virtualenv or similar setups. Pull requests are welcome to improve these steps! +Counter Processor requires Python 3.7 or higher. 
The following commands are intended to be run as root but we are aware that Pythonistas might prefer fancy virtualenv or similar setups. Pull requests are welcome to improve these steps! Enable the EPEL repo if you haven't already:: From 109a4a17e1116374cc11eb0a829dcdb42b0fe9db Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 14 Apr 2022 10:45:59 -0400 Subject: [PATCH 207/366] Update doc/sphinx-guides/source/developers/workflows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/workflows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/workflows.rst b/doc/sphinx-guides/source/developers/workflows.rst index 5efdb3a5370..df63bf239fe 100644 --- a/doc/sphinx-guides/source/developers/workflows.rst +++ b/doc/sphinx-guides/source/developers/workflows.rst @@ -178,7 +178,7 @@ Available variables are: archiver ++++++++ -A step that sends an archival copy of a Dataset Version to a configured archiver, e.g. the DuraCloud interface of Chronopolis. See the :ref:`rda-bagit-archiving` for further detail. +A step that sends an archival copy of a Dataset Version to a configured archiver, e.g. the DuraCloud interface of Chronopolis. See :ref:`rda-bagit-archiving` for further detail. Note - the example step includes two settings required for any archiver, three (DuraCloud*) that are specific to DuraCloud, and the optional BagGeneratorThreads setting that controls parallelism when creating the Bag. From f6cea7c436ad4654f200c04faa0bbd3dab66987a Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 14 Apr 2022 10:57:51 -0400 Subject: [PATCH 208/366] typo - remove : --- doc/sphinx-guides/source/admin/integrations.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/admin/integrations.rst b/doc/sphinx-guides/source/admin/integrations.rst index d958b2d77d6..8d3f53981e0 100644 --- a/doc/sphinx-guides/source/admin/integrations.rst +++ b/doc/sphinx-guides/source/admin/integrations.rst @@ -164,7 +164,7 @@ RDA BagIt (BagPack) Archiving A Dataverse installation can be configured to submit a copy of published Datasets, packaged as `Research Data Alliance conformant `_ zipped `BagIt `_ bags to the `Chronopolis `_ via `DuraCloud `_, to a local file system, or to `Google Cloud Storage `_. -For details on how to configure this integration, see :ref:`:BagIt Export` in the :doc:`/installation/config` section of the Installation Guide. +For details on how to configure this integration, see :ref:`BagIt Export` in the :doc:`/installation/config` section of the Installation Guide. 
Future Integrations ------------------- From b7c99978f7db0d35fd344c0ac6cf3ae5514edf1e Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 14 Apr 2022 11:17:26 -0400 Subject: [PATCH 209/366] #8191 update popup validation --- src/main/java/propertyFiles/Bundle.properties | 9 ++++--- src/main/webapp/dataset.xhtml | 22 +++++++++++++++- .../webapp/file-edit-popup-fragment.xhtml | 26 ++++++++----------- 3 files changed, 37 insertions(+), 20 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 95179c87d9e..31a58ae65b1 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1780,7 +1780,7 @@ file.dataFilesTab.terms.list.termsOfAccess.description.line.2=Learn about restri file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Restricted Files file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=The number of restricted files in this dataset. file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=There {0, choice, 0#are|1#is|2#are} {0} restricted {0, choice, 0#files|1#file|2#files} in this dataset. -file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Terms of Access +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Terms of Access for Restricted Files file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=Information on how and if users can access restricted files in this Dataset file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Request Access file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=If checked, users can request access to the restricted files in this dataset. @@ -2148,10 +2148,11 @@ ingest.csv.nullStream=Stream can't be null. citationFrame.banner.countdownMessage.seconds=seconds #file-edit-popup-fragment.xhtml #editFilesFragment.xhtml -dataset.access.accessHeader=Restrict Files and Define Data Access +dataset.access.accessHeader=Restrict Access dataset.access.accessHeader.invalid.state=Define Data Access -dataset.access.description=Restricting limits access to published files. People who want to use the restricted files can request access by default. If you disable request access, you must add information about access to the Terms of Access field. -dataset.access.description.line.2=These settings can be changed when you edit the dataset. Learn about restricting files and dataset access in the User Guide. +dataset.access.description=Restricting limits access to published files. People who want to use the restricted files can request access by default. +dataset.access.description.disable=If you disable request access, you must add information about access to the Terms of Access field. +dataset.access.description.line.2=Learn about restricting files and dataset access in the User Guide. 
#datasetFieldForEditFragment.xhtml dataset.AddReplication=Add "Replication Data for" to Title diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 774ec1b865a..ce0c1774350 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -111,6 +111,11 @@ + + + + + @@ -1960,7 +1965,22 @@ if (outcome ==='GuestbookRequired'){ PF('downloadPopup').show(); } - } + } + + function testTOA() { + + var termsofAccessHidden = document.getElementById("datasetForm:termsofAccessHidden").value; + var fileAccessRequestHidden = document.getElementById("datasetForm:fileAccessRequestHidden").value; + + + + if (fileAccessRequestHidden === 'false' && termsofAccessHidden === '') { + alert('invalidTermsofAccessHidden'); + } else { + PF('accessPopup').hide(); + } + } + //]]> diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml index 6a32d581e8e..ad34b09c2a1 100644 --- a/src/main/webapp/file-edit-popup-fragment.xhtml +++ b/src/main/webapp/file-edit-popup-fragment.xhtml @@ -32,29 +32,34 @@

    +

    + + + +
    -
    - - + +
    #{bundle['file.dataFilesTab.terms.list.termsOfAccess.requestAccess.warning.outofcompliance']}
    @@ -79,7 +84,7 @@ onclick="testTOA();" - update=":messagePanel" /> + update=":messagePanel,popupWarning" /> @@ -197,16 +202,7 @@ From b7eafecb5d574268ddc01af8dca72bb033a0ea29 Mon Sep 17 00:00:00 2001 From: "don.sizemore" Date: Thu, 14 Apr 2022 12:36:18 -0400 Subject: [PATCH 210/366] #8390 Counter Processor v0.1.04 requires Python 3.7 or higher. --- .../source/installation/prerequisites.rst | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index fd92cb0b855..ef3c4b0c106 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -416,7 +416,7 @@ Counter Processor is required to enable Make Data Count metrics in a Dataverse i Installing Counter Processor ============================ -Counter Processor has only been tested on el7 (see "Linux" above). Please note that a scripted installation using Ansible is mentioned in the :doc:`/developers/make-data-count` section of the Developer Guide. +A scripted installation using Ansible is mentioned in the :doc:`/developers/make-data-count` section of the Developer Guide. As root, download and install Counter Processor:: @@ -449,19 +449,17 @@ As root, create a "counter" user and change ownership of Counter Processor direc Installing Counter Processor Python Requirements ================================================ -Counter Processor requires Python 3.7 or higher. The following commands are intended to be run as root but we are aware that Pythonistas might prefer fancy virtualenv or similar setups. Pull requests are welcome to improve these steps! +Counter Processor version 0.1.04 requires Python 3.7 or higher. This version of Python is available in many operating systems, and is purportedly available for RHEL7 or CentOS 7 via Red Hat Software Collections. Alternately, one may compile it from source. -Enable the EPEL repo if you haven't already:: +The following commands are intended to be run as root but we are aware that Pythonistas might prefer fancy virtualenv or similar setups. Pull requests are welcome to improve these steps! - yum install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm +Install Python 3.9:: -Install Python 3.6:: - - yum install python36 + yum install python39 Install Counter Processor Python requirements:: - python3.6 -m ensurepip + python3.9 -m ensurepip cd /usr/local/counter-processor-0.1.04 pip3 install -r requirements.txt From 3d6c2b45a0ae21d8ab4ae2e83b39971b217aacf5 Mon Sep 17 00:00:00 2001 From: "don.sizemore" Date: Thu, 14 Apr 2022 13:23:52 -0400 Subject: [PATCH 211/366] #8380 bump admin/ and developers/ Python3.6 documentation --- doc/sphinx-guides/source/admin/make-data-count.rst | 2 +- doc/sphinx-guides/source/developers/make-data-count.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/admin/make-data-count.rst b/doc/sphinx-guides/source/admin/make-data-count.rst index 3f1b04c3c36..8a96e949ff9 100644 --- a/doc/sphinx-guides/source/admin/make-data-count.rst +++ b/doc/sphinx-guides/source/admin/make-data-count.rst @@ -112,7 +112,7 @@ Soon we will be setting up a cron job to run nightly but we start with a single * Run Counter Processor. 
- * ``CONFIG_FILE=counter-processor-config.yaml python36 main.py`` + * ``CONFIG_FILE=counter-processor-config.yaml python39 main.py`` * A JSON file in SUSHI format will be created in the directory you specified under "output_file" in the config file. diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst index 9c6095136b8..a3c0d10dc5e 100644 --- a/doc/sphinx-guides/source/developers/make-data-count.rst +++ b/doc/sphinx-guides/source/developers/make-data-count.rst @@ -53,7 +53,7 @@ Once you are done with your configuration, you can run Counter Processor like th ``cd /usr/local/counter-processor-0.1.04`` -``CONFIG_FILE=counter-processor-config.yaml python36 main.py`` +``CONFIG_FILE=counter-processor-config.yaml python39 main.py`` (Please note that the Counter Processor README says you can also pass in values like ``START_DATE``, ``END_DATE`` etc. at the command line if you find this to be more convenient.) From 6d4d16bc2a27061a0718740c6fe7e6f7fe2ab9ff Mon Sep 17 00:00:00 2001 From: qqmyers Date: Thu, 14 Apr 2022 14:51:23 -0400 Subject: [PATCH 212/366] change platform, update notes for upgraders --- doc/release-notes/8380-counter-processor-update.md | 2 +- scripts/vagrant/counter-processor-config.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/8380-counter-processor-update.md b/doc/release-notes/8380-counter-processor-update.md index f050bae0f0d..8c8bd400d85 100644 --- a/doc/release-notes/8380-counter-processor-update.md +++ b/doc/release-notes/8380-counter-processor-update.md @@ -1 +1 @@ -This release includes support for counter-processor-0.1.04 for processing Make Data Count metrics. If you are running Make Data Counts support, you should reinstall/reconfigure counter-processor as described in the latest Guides. +This release includes support for counter-processor-0.1.04 for processing Make Data Count metrics. If you are running Make Data Counts support, you should reinstall/reconfigure counter-processor as described in the latest Guides. (For existing installations, note that counter-processor-0.1.04 requires a newer version of python so you will need to follow the full counter-processor install. Also note that if you configure the new version the same way, it will reprocess the days in the current month when it is first run. This is normal and will not affect the metrics in Dataverse.) diff --git a/scripts/vagrant/counter-processor-config.yaml b/scripts/vagrant/counter-processor-config.yaml index 4227de89f40..445c47827f1 100644 --- a/scripts/vagrant/counter-processor-config.yaml +++ b/scripts/vagrant/counter-processor-config.yaml @@ -44,7 +44,7 @@ output_file: /dataverse/sushi_sample_logs output_format: json # the name of the platform that goes into your reports -platform: Dash +platform: Dataverse # Don't put your api token in here if you're going to commit it, but put in separate secrets.yaml in same # directory as the config or else set a environment variable when starting up in order to override the key. 
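As an illustrative sketch of the override mechanism mentioned in the comment above, a scoped Counter Processor run might supply values from the environment rather than editing the config file. The exact variable names and date format accepted are defined by counter-processor itself, so treat them as assumptions to verify against its README:

.. code:: bash

   cd /usr/local/counter-processor-0.1.04

   # limit the run to a date range instead of the full log history
   CONFIG_FILE=counter-processor-config.yaml START_DATE=2022-04-01 END_DATE=2022-04-14 python39 main.py

   # alternatively, an isolated environment instead of a system-wide pip install:
   #   python3.9 -m venv venv && ./venv/bin/pip install -r requirements.txt
   # ...then invoke main.py with ./venv/bin/python instead of python39
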
From cd068f6d42257a8723d02af1ec5d1e7c7cbff42d Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 14 Apr 2022 16:25:41 -0400 Subject: [PATCH 213/366] #8191 fix for filepage and terms --- src/main/webapp/dataset-license-terms.xhtml | 5 ++ src/main/webapp/dataset.xhtml | 50 ++++++++----------- .../webapp/file-edit-popup-fragment.xhtml | 29 ++++++++--- 3 files changed, 46 insertions(+), 38 deletions(-) diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index 049bfbd15d0..a5930ccf35f 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -347,6 +347,11 @@
    + + + + + diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index ce0c1774350..f42e0bef7a5 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -111,11 +111,7 @@ - - - - - + @@ -764,16 +760,17 @@
    - - - - - + action="#{DatasetPage.save}" + /> + @@ -927,14 +924,9 @@
    + oncomplete="javascript:bind_bsui_components();$(document).scrollTop(0);"/> @@ -1876,6 +1868,7 @@ + diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml index ad34b09c2a1..a2d3a8eae59 100644 --- a/src/main/webapp/file-edit-popup-fragment.xhtml +++ b/src/main/webapp/file-edit-popup-fragment.xhtml @@ -3,6 +3,7 @@ xmlns:f="http://java.sun.com/jsf/core" xmlns:ui="http://java.sun.com/jsf/facelets" xmlns:p="http://primefaces.org/ui" + xmlns:o="http://omnifaces.org/ui" xmlns:jsf="http://xmlns.jcp.org/jsf"> @@ -41,7 +42,8 @@

    - + +
    @@ -80,9 +82,7 @@
    -
    - + From 018808470296a997c000af49443dadfeaf41f166 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 14 Apr 2022 16:39:21 -0400 Subject: [PATCH 214/366] #8191 resolve merge --- src/main/webapp/dataset.xhtml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 471bda2471c..278b18c7405 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -762,15 +762,15 @@ - +
    @@ -857,7 +857,7 @@ rendered="#{(!DatasetPage.workingVersion.deaccessioned or (DatasetPage.workingVersion.deaccessioned and DatasetPage.canUpdateDataset())) and (empty DatasetPage.editMode or DatasetPage.editMode == 'METADATA')}">
    - - + oncomplete="$(document).scrollTop(0);"/> + @@ -1861,10 +1861,10 @@
    - + - + From d1fa0ceb5010463f4fcc7eb950ff2d764a55d9af Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 14 Apr 2022 17:04:38 -0400 Subject: [PATCH 215/366] #8191 fix create ds --- src/main/webapp/dataset.xhtml | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 278b18c7405..fb01c527823 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1960,8 +1960,16 @@ } function testTOADatasetPage() { - var termsofAccessHidden = document.getElementById("datasetForm:tabView:termsofAccessHiddenLT").value; - var fileAccessRequestHidden = document.getElementById("datasetForm:tabView:fileAccessRequestHiddenLT").value; + var termsofAccessHidden, fileAccessRequestHidden; + try{ + termsofAccessHidden = document.getElementById("datasetForm:tabView:termsofAccessHiddenLT").value; + fileAccessRequestHidden = document.getElementById("datasetForm:tabView:fileAccessRequestHiddenLT").value; + } + catch (error){ + //terms not present so save... + PF('blockDatasetForm').show(); + datasetSaveCommand(); + } if (fileAccessRequestHidden === 'false' && termsofAccessHidden === '') { //Not compliant show error keep page open... } else { From ebec3bd017f0bd6b8f3fb456a71e44667e10863c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 15 Apr 2022 10:50:57 -0400 Subject: [PATCH 216/366] #8191 fix success message posting --- src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java | 2 +- src/main/webapp/editFilesFragment.xhtml | 2 +- src/main/webapp/file-edit-popup-fragment.xhtml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index a6192e114b6..c4d3f51c86a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -737,7 +737,7 @@ public void restrictFiles(boolean restricted) throws UnsupportedOperationExcepti String successMessage = getBundleString("file.restricted.success"); logger.fine(successMessage); successMessage = successMessage.replace("{0}", fileNames); - JsfHelper.addFlashMessage(successMessage); + JsfHelper.addSuccessMessage(successMessage); } } diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 01e6832428e..e281f406bc7 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -445,7 +445,7 @@
  • + oncomplete="PF('accessPopup').show();"> diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml index a2d3a8eae59..8f8de725bdc 100644 --- a/src/main/webapp/file-edit-popup-fragment.xhtml +++ b/src/main/webapp/file-edit-popup-fragment.xhtml @@ -84,7 +84,7 @@
    + update="popupWarning" /> @@ -198,7 +198,7 @@
    - + -