From 2dbab881d113a9ebb95399babe5e4e34370bca28 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 29 Aug 2018 17:11:16 -0400 Subject: [PATCH 001/114] #4410 Dataset linking prelim check in --- .../edu/harvard/iq/dataverse/DatasetPage.java | 19 +++++++ .../edu/harvard/iq/dataverse/Dataverse.java | 3 +- .../iq/dataverse/DataverseConverter.java | 2 + .../iq/dataverse/DataverseServiceBean.java | 51 +++++++++++++++++++ src/main/webapp/dataset.xhtml | 20 ++++++++ 5 files changed, 94 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 5e6b61a896a..f3ec2470c6e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2113,6 +2113,16 @@ public void setSelectedFiles(List selectedFiles) { this.selectedFiles = selectedFiles; } + private List selectedDataversesForLinking = new ArrayList<>(); + + public List getSelectedDataversesForLinking() { + return selectedDataversesForLinking; + } + + public void setSelectedDataversesForLinking(List selectedDataversesForLinking) { + this.selectedDataversesForLinking = selectedDataversesForLinking; + } + private List selectedRestrictedFiles; // = new ArrayList<>(); public List getSelectedRestrictedFiles() { @@ -2308,6 +2318,15 @@ public String saveLinkedDataset() { } return returnToLatestVersion(); } + + public List completeLinkingDataverse(String query) { + List prelim = dataverseService.filterDataversesForLinking(query); + for (Dataverse res : prelim){ + System.out.print("Again: " + res.getDisplayName()); + } + + return prelim; + } List previouslyRestrictedFiles = null; diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index ac2049eb0a2..433477936d2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -44,7 +44,8 @@ @NamedQuery(name = "Dataverse.findRoot", query = "SELECT d FROM Dataverse d where d.owner.id=null"), @NamedQuery(name = "Dataverse.findByAlias", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.alias)=:alias"), @NamedQuery(name = "Dataverse.filterByAlias", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.alias) LIKE :alias order by dv.alias"), - @NamedQuery(name = "Dataverse.filterByAliasNameAffiliation", query="SELECT dv FROM Dataverse dv WHERE (LOWER(dv.alias) LIKE :alias) OR (LOWER(dv.name) LIKE :name) OR (LOWER(dv.affiliation) LIKE :affiliation) order by dv.alias") + @NamedQuery(name = "Dataverse.filterByAliasNameAffiliation", query="SELECT dv FROM Dataverse dv WHERE (LOWER(dv.alias) LIKE :alias) OR (LOWER(dv.name) LIKE :name) OR (LOWER(dv.affiliation) LIKE :affiliation) order by dv.alias"), + @NamedQuery(name = "Dataverse.filterByName", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.name) LIKE :name order by dv.alias") }) @Entity @Table(indexes = {@Index(columnList="defaultcontributorrole_id") diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java b/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java index 685ec8132b3..af9604d53ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java @@ -31,9 +31,11 @@ public Object getAsObject(FacesContext facesContext, UIComponent component, Stri @Override public String getAsString(FacesContext facesContext, UIComponent component, Object value) { 
+ System.out.print("Not casted " + value); if (value == null || value.equals("")) { return ""; } else { + System.out.print("get as string " + ((Dataverse) value).getDisplayName()); return ((Dataverse) value).getId().toString(); //return ((Dataverse) value).getAlias(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index d698c71b7f9..c1ee684fcf2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -5,6 +5,10 @@ */ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.authorization.groups.Group; +import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; +import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.search.IndexServiceBean; @@ -22,6 +26,7 @@ import java.util.ResourceBundle; import java.util.MissingResourceException; import java.util.Properties; +import java.util.Set; import java.util.concurrent.Future; import java.util.jar.Attributes; import java.util.jar.Manifest; @@ -34,6 +39,7 @@ import javax.persistence.NonUniqueResultException; import javax.persistence.PersistenceContext; import javax.persistence.TypedQuery; +import org.apache.commons.lang.StringUtils; /** * @@ -49,7 +55,13 @@ public class DataverseServiceBean implements java.io.Serializable { @EJB DatasetServiceBean datasetService; + + @EJB + GroupServiceBean groupSvc; + @EJB + ExplicitGroupServiceBean explicitGroupSvc; + @EJB DataverseLinkingServiceBean dataverseLinkingService; @@ -441,6 +453,45 @@ public List filterByAliasQuery(String filterQuery) { return ret; } + public List filterDataversesForLinking(String query) { + List dataverseList = new ArrayList<>(); + + // we get the users through a query that does the filtering through the db, + // so that we don't have to instantiate all of the RoleAssignee objects + /* + em.createNamedQuery("Dataverse.filterByName", Dataverse.class) + .setParameter("name", "%" + query + "%") + .getResultList().stream() + .filter(dv -> dataverseList == null || !dataverseList.contains(dv)) + .forEach((dv) -> { + dataverseList.add(dv); + }); + */ + List results = em.createNamedQuery("Dataverse.filterByName", Dataverse.class) + .setParameter("name", "%" + query + "%") + .getResultList(); + + for (Dataverse res : results){ + System.out.print(res.getDisplayName()); + dataverseList.add(res); + } + + // now we add groups to the list, both global and explicit + /* + Set groups = groupSvc.findGlobalGroups(); + groups.addAll(explicitGroupSvc.findAvailableFor(dvObject)); + groups.stream() + .filter(ra -> StringUtils.containsIgnoreCase(ra.getDisplayInfo().getTitle(), query) + || StringUtils.containsIgnoreCase(ra.getIdentifier(), query)) + .filter(ra -> roleAssignSelectedRoleAssignees == null || !roleAssignSelectedRoleAssignees.contains(ra)) + .forEach((ra) -> { + roleAssigneeList.add(ra); + }); +*/ + + return dataverseList; + } + /** * Used to identify and properly display Harvested objects on the dataverse page. * diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 96844ed5119..327bfb42a7c 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1263,6 +1263,7 @@ +

#{bundle['dataverse.link.no.choice']}

@@ -1287,6 +1288,25 @@ +
+ + + + + + + + + + +
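A side note on the new Dataverse.filterByName named query introduced in PATCH 001 above: the query compares against LOWER(dv.name), while filterDataversesForLinking binds the raw query string, so the bound value presumably needs to be lower-cased as well for mixed-case input to match. A minimal caller-side sketch, illustrative only and not part of the patch (variable names are assumed):

    // Bind the wildcarded, lower-cased term so it matches the LOWER(dv.name) comparison.
    List<Dataverse> matches = em.createNamedQuery("Dataverse.filterByName", Dataverse.class)
            .setParameter("name", "%" + query.toLowerCase() + "%")
            .getResultList();
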
From 60faa1cc722bd9bea5e1da89cdb147429ec0cd9d Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 30 Aug 2018 10:48:43 -0400 Subject: [PATCH 002/114] #4410 Add permissions remove debug code --- .../edu/harvard/iq/dataverse/DatasetPage.java | 11 +++-- .../iq/dataverse/DataverseConverter.java | 2 - .../iq/dataverse/DataverseServiceBean.java | 42 ++++++------------- src/main/webapp/dataset.xhtml | 12 +++--- 4 files changed, 23 insertions(+), 44 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 5fc836d247c..e90bc82e6d0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2321,12 +2321,11 @@ public String saveLinkedDataset() { } public List completeLinkingDataverse(String query) { - List prelim = dataverseService.filterDataversesForLinking(query); - for (Dataverse res : prelim){ - System.out.print("Again: " + res.getDisplayName()); - } - - return prelim; + if (session.getUser().isAuthenticated()) { + return dataverseService.filterDataversesForLinking(query, (AuthenticatedUser) session.getUser()); + } else { + return null; + } } List previouslyRestrictedFiles = null; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java b/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java index af9604d53ba..685ec8132b3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java @@ -31,11 +31,9 @@ public Object getAsObject(FacesContext facesContext, UIComponent component, Stri @Override public String getAsString(FacesContext facesContext, UIComponent component, Object value) { - System.out.print("Not casted " + value); if (value == null || value.equals("")) { return ""; } else { - System.out.print("get as string " + ((Dataverse) value).getDisplayName()); return ((Dataverse) value).getId().toString(); //return ((Dataverse) value).getAlias(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index c1ee684fcf2..6aed0a9ede8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -5,6 +5,7 @@ */ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.groups.Group; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; @@ -68,6 +69,9 @@ public class DataverseServiceBean implements java.io.Serializable { @EJB DatasetLinkingServiceBean datasetLinkingService; + @EJB + PermissionServiceBean permissionService; + @EJB SystemConfig systemConfig; @@ -453,41 +457,19 @@ public List filterByAliasQuery(String filterQuery) { return ret; } - public List filterDataversesForLinking(String query) { + public List filterDataversesForLinking(String query, AuthenticatedUser user) { + List dataverseList = new ArrayList<>(); - // we get the users through a query that does the filtering through the db, - // so that we don't have to instantiate all of the RoleAssignee objects - /* - em.createNamedQuery("Dataverse.filterByName", Dataverse.class) - .setParameter("name", "%" + query + "%") - .getResultList().stream() - .filter(dv -> dataverseList == null || 
!dataverseList.contains(dv)) - .forEach((dv) -> { - dataverseList.add(dv); - }); - */ - List results = em.createNamedQuery("Dataverse.filterByName", Dataverse.class) + List results = em.createNamedQuery("Dataverse.filterByName", Dataverse.class) .setParameter("name", "%" + query + "%") .getResultList(); - - for (Dataverse res : results){ - System.out.print(res.getDisplayName()); - dataverseList.add(res); - } - // now we add groups to the list, both global and explicit - /* - Set groups = groupSvc.findGlobalGroups(); - groups.addAll(explicitGroupSvc.findAvailableFor(dvObject)); - groups.stream() - .filter(ra -> StringUtils.containsIgnoreCase(ra.getDisplayInfo().getTitle(), query) - || StringUtils.containsIgnoreCase(ra.getIdentifier(), query)) - .filter(ra -> roleAssignSelectedRoleAssignees == null || !roleAssignSelectedRoleAssignees.contains(ra)) - .forEach((ra) -> { - roleAssigneeList.add(ra); - }); -*/ + for (Dataverse res : results) { + if (this.permissionService.userOn(user, res).has(Permission.PublishDataverse)) { + dataverseList.add(res); + } + } return dataverseList; } diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 327bfb42a7c..ddf32baac90 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -184,7 +184,7 @@ - + styleClass="DropdownPopup" + var="dataverseLk" itemLabel="#{dataverseLk.displayName}" itemValue="#{dataverseLk}" converter="dataverseConverter"> - - + + - + From 7bb8b99b0d64c9e270624e44c41a815dfeac2a33 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 30 Aug 2018 16:09:59 -0400 Subject: [PATCH 003/114] #4410 filter used links fix messaging --- src/main/java/Bundle.properties | 1 + .../edu/harvard/iq/dataverse/DatasetPage.java | 40 ++++++++++++++++++- .../iq/dataverse/DataverseServiceBean.java | 17 ++++++-- .../command/impl/LinkDatasetCommand.java | 3 +- src/main/webapp/dataset.xhtml | 11 ++--- 5 files changed, 59 insertions(+), 13 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 9f13bf4fdb7..5724ccaad00 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -683,6 +683,7 @@ dataverse.link.save=Save Linked Dataverse dataset.link.save=Save Linked Dataset dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to. dataverse.link.dataset.choose=Choose which of your dataverses you would like to link this dataset to. +dataverse.link.dataset.none=No linkable dataverses available. dataverse.link.no.choice=You have one dataverse you can add linked dataverses and datasets in. dataverse.link.no.linkable=To be able to link a dataverse or dataset, you need to have your own dataverse. Click on the Add Data button on the homepage to get started. dataverse.link.no.linkable.remaining=You have already linked all of your eligible dataverses. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index e90bc82e6d0..4b0ac537f48 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2285,13 +2285,48 @@ public void updateFileCounts(){ } - private List getSuccessMessageArguments() { + private List getSuccessMessageArguments() { List arguments = new ArrayList<>(); arguments.add(StringEscapeUtils.escapeHtml(dataset.getDisplayName())); String linkString = "" + StringEscapeUtils.escapeHtml(linkingDataverse.getDisplayName()) + ""; arguments.add(linkString); return arguments; } + + public String saveLinkingDataverses(){ + if (selectedDataversesForLinking == null || selectedDataversesForLinking.isEmpty()) { + JsfHelper.addFlashMessage(BundleUtil.getStringFromBundle("dataverse.link.select")); + return ""; + } + for (Dataverse dv : selectedDataversesForLinking){ + saveLink(dv); + } + return ""; + } + + private void saveLink(Dataverse dataverse){ + + if (readOnly) { + // Pass a "real", non-readonly dataset the the LinkDatasetCommand: + dataset = datasetService.find(dataset.getId()); + } + LinkDatasetCommand cmd = new LinkDatasetCommand(dvRequestService.getDataverseRequest(), dataverse, dataset); + linkingDataverse = dataverse; + try { + commandEngine.submit(cmd); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.linkSuccess", getSuccessMessageArguments())); + } catch (CommandException ex) { + String msg = "There was a problem linking this dataset to yours: " + ex; + logger.severe(msg); + msg = BundleUtil.getStringFromBundle("dataset.notlinked.msg") + ex; + /** + * @todo how do we get this message to show up in the GUI? + */ + FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), msg); + FacesContext.getCurrentInstance().addMessage(null, message); + } + + } public String saveLinkedDataset() { if (linkingDataverseId == null) { @@ -2321,8 +2356,9 @@ public String saveLinkedDataset() { } public List completeLinkingDataverse(String query) { + dataset = datasetService.find(dataset.getId()); if (session.getUser().isAuthenticated()) { - return dataverseService.filterDataversesForLinking(query, (AuthenticatedUser) session.getUser()); + return dataverseService.filterDataversesForLinking(query, (AuthenticatedUser) session.getUser(), dataset); } else { return null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 6aed0a9ede8..370cd206169 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -457,7 +457,7 @@ public List filterByAliasQuery(String filterQuery) { return ret; } - public List filterDataversesForLinking(String query, AuthenticatedUser user) { + public List filterDataversesForLinking(String query, AuthenticatedUser user, Dataset dataset) { List dataverseList = new ArrayList<>(); @@ -465,9 +465,20 @@ public List filterDataversesForLinking(String query, AuthenticatedUse .setParameter("name", "%" + query + "%") .getResultList(); + List alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM datasetlinkingdataverse WHERE dataset_id = " + dataset.getId()).getResultList(); + List remove = new ArrayList<>(); + + if (alreadyLinkeddv_ids != null && !alreadyLinkeddv_ids.isEmpty()) { + 
alreadyLinkeddv_ids.stream().map((testDVId) -> this.find(testDVId)).forEachOrdered((removeIt) -> { + remove.add(removeIt); + }); + } + for (Dataverse res : results) { - if (this.permissionService.userOn(user, res).has(Permission.PublishDataverse)) { - dataverseList.add(res); + if (!remove.contains(res)) { + if (this.permissionService.userOn(user, res).has(Permission.PublishDataverse)) { + dataverseList.add(res); + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java index ba901c53898..7c006aa4e64 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java @@ -39,10 +39,11 @@ public LinkDatasetCommand(DataverseRequest aRequest, Dataverse dataverse, Datase @Override public DatasetLinkingDataverse execute(CommandContext ctxt) throws CommandException { + /* if ((!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser())) { throw new PermissionException("Link Dataset can only be called by superusers.", this, Collections.singleton(Permission.PublishDataverse), linkingDataverse); - } + }*/ if (linkedDataset.getOwner().equals(linkingDataverse)) { throw new IllegalCommandException("Can't link a dataset to its dataverse", this); } diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index ddf32baac90..a40fb52376b 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1294,7 +1294,7 @@ minQueryLength="2" queryDelay="1000" value="#{DatasetPage.selectedDataversesForLinking}" completeMethod="#{DatasetPage.completeLinkingDataverse}" - requiredMessage="Selection Required" + emptyMessage="#{bundle['dataverse.link.dataset.none']}" styleClass="DropdownPopup" var="dataverseLk" itemLabel="#{dataverseLk.displayName}" itemValue="#{dataverseLk}" converter="dataverseConverter"> @@ -1317,12 +1317,9 @@
- - + From d7e0288cea87c3d2af443151dcc9b7ecbf06e194 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 31 Aug 2018 10:40:04 -0400 Subject: [PATCH 004/114] #4410 Remove unused code --- .../edu/harvard/iq/dataverse/DatasetPage.java | 28 +------- src/main/webapp/dataset.xhtml | 69 +++++-------------- 2 files changed, 17 insertions(+), 80 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 4b0ac537f48..6218afe5f49 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2327,33 +2327,7 @@ private void saveLink(Dataverse dataverse){ } } - - public String saveLinkedDataset() { - if (linkingDataverseId == null) { - JsfHelper.addFlashMessage(BundleUtil.getStringFromBundle("dataverse.link.select")); - return ""; - } - linkingDataverse = dataverseService.find(linkingDataverseId); - if (readOnly) { - // Pass a "real", non-readonly dataset the the LinkDatasetCommand: - dataset = datasetService.find(dataset.getId()); - } - LinkDatasetCommand cmd = new LinkDatasetCommand(dvRequestService.getDataverseRequest(), linkingDataverse, dataset); - try { - commandEngine.submit(cmd); - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.linkSuccess", getSuccessMessageArguments())); - } catch (CommandException ex) { - String msg = "There was a problem linking this dataset to yours: " + ex; - logger.severe(msg); - msg = BundleUtil.getStringFromBundle("dataset.notlinked.msg") + ex; - /** - * @todo how do we get this message to show up in the GUI? - */ - FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), msg); - FacesContext.getCurrentInstance().addMessage(null, message); - } - return returnToLatestVersion(); - } + public List completeLinkingDataverse(String query) { dataset = datasetService.find(dataset.getId()); diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index a40fb52376b..5d7ca8b9e56 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1262,60 +1262,23 @@ - - - -

#{bundle['dataverse.link.no.choice']}

-
- -

#{bundle['dataverse.link.dataset.choose']}

-
-
-
- -
- - - - -

#{DatasetPage.linkingDataverse.displayName}

-
-
-
-
-
- - - - - - - + + + - - - -
- - -

#{bundle['dataverse.link.no.linkable']}

-
- -

#{bundle['dataverse.link.no.linkable.remaining']}

-
-
+ + + + + +
Date: Fri, 31 Aug 2018 13:51:20 -0400 Subject: [PATCH 005/114] Added back link dataset msg's to popup, cleaned up layout. [ref #4410] --- src/main/java/Bundle.properties | 5 +- src/main/webapp/dataset.xhtml | 91 ++++++++++++++++++++++++++------- 2 files changed, 76 insertions(+), 20 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 5724ccaad00..31d4d44c66b 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -678,11 +678,12 @@ dataverse.contact=Email Dataverse Contact dataset.link=Link Dataset dataverse.link=Link Dataverse dataverse.link.btn.tip=Link to Your Dataverse -dataverse.link.yourDataverses=Your {0, choice, 1#Dataverse|2#Dataverses} +dataverse.link.yourDataverses=Your Dataverse +dataverse.link.yourDataverses.inputPlaceholder=Enter Dataverse Name dataverse.link.save=Save Linked Dataverse dataset.link.save=Save Linked Dataset dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to. -dataverse.link.dataset.choose=Choose which of your dataverses you would like to link this dataset to. +dataverse.link.dataset.choose=Enter the name of the dataverse you would like to link this dataset to. dataverse.link.dataset.none=No linkable dataverses available. dataverse.link.no.choice=You have one dataverse you can add linked dataverses and datasets in. dataverse.link.no.linkable=To be able to link a dataverse or dataset, you need to have your own dataverse. Click on the Add Data button on the homepage to get started. diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 5d7ca8b9e56..a158ab77a56 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1261,24 +1261,76 @@
- - - - - - - - - - - + + + + + + + + + + +

#{bundle['dataverse.link.no.choice']}

+
+ +

#{bundle['dataverse.link.dataset.choose']}

+
+
+
+ +
+ + + + + + + + + + + + + + + + + + + +

#{DatasetPage.linkingDataverse.displayName}

+
+
+
+
+
+ + + + + + + +

#{bundle['dataverse.link.no.linkable']}

+
+ +

#{bundle['dataverse.link.no.linkable.remaining']}

+
+
+ + + +
+ + +
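The autocomplete in the Link Dataset popup above round-trips each selected Dataverse through the dataverseConverter. These patches only show the getAsString side, which emits the entity id, so the matching getAsObject side is sketched here as an assumption, resolving the posted id back to an entity through an injected DataverseServiceBean lookup (the same find() call used elsewhere in these patches):

    @Override
    public Object getAsObject(FacesContext facesContext, UIComponent component, String submittedValue) {
        if (submittedValue == null || submittedValue.trim().isEmpty()) {
            return null;
        }
        // Assumed lookup: turn the id written by getAsString back into the managed entity.
        return dataverseService.find(Long.valueOf(submittedValue));
    }
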
From 8c4d1ca5d9f0a9463b6c7bdd0e52193bc524fc0c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 31 Aug 2018 13:54:18 -0400 Subject: [PATCH 006/114] #4410 remove more unused code --- .../harvard/iq/dataverse/DataversePage.java | 53 ------------------- 1 file changed, 53 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index 3f8a7347cd4..b0b217313b5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -181,59 +181,6 @@ public LinkMode getLinkMode() { public void setLinkMode(LinkMode linkMode) { this.linkMode = linkMode; } - - public void setupLinkingPopup (String popupSetting){ - if (popupSetting.equals("link")){ - setLinkMode(LinkMode.LINKDATAVERSE); - } else { - setLinkMode(LinkMode.SAVEDSEARCH); - } - updateLinkableDataverses(); - } - - public void updateLinkableDataverses() { - dataversesForLinking = new ArrayList<>(); - linkingDVSelectItems = new ArrayList<>(); - - //Since only a super user function add all dvs - dataversesForLinking = dataverseService.findAll();// permissionService.getDataversesUserHasPermissionOn(session.getUser(), Permission.PublishDataverse); - - /* - List roles = dataverseRoleServiceBean.getDataverseRolesByPermission(Permission.PublishDataverse, dataverse.getId()); - List types = new ArrayList(); - types.add("Dataverse"); - for (Long dvIdAsInt : permissionService.getDvObjectIdsUserHasRoleOn(session.getUser(), roles, types, false)) { - dataversesForLinking.add(dataverseService.find(dvIdAsInt)); - }*/ - - //for linking - make sure the link hasn't occurred and its not int the tree - if (this.linkMode.equals(LinkMode.LINKDATAVERSE)) { - - // remove this and it's parent tree - dataversesForLinking.remove(dataverse); - Dataverse testDV = dataverse; - while(testDV.getOwner() != null){ - dataversesForLinking.remove(testDV.getOwner()); - testDV = testDV.getOwner(); - } - - for (Dataverse removeLinked : linkingService.findLinkingDataverses(dataverse.getId())) { - dataversesForLinking.remove(removeLinked); - } - } else{ - //for saved search add all - - } - - for (Dataverse selectDV : dataversesForLinking) { - linkingDVSelectItems.add(new SelectItem(selectDV.getId(), selectDV.getDisplayName())); - } - - if (!dataversesForLinking.isEmpty() && dataversesForLinking.size() == 1 && dataversesForLinking.get(0) != null) { - linkingDataverse = dataversesForLinking.get(0); - linkingDataverseId = linkingDataverse.getId(); - } - } public void updateSelectedLinkingDV(ValueChangeEvent event) { linkingDataverseId = (Long) event.getNewValue(); From bda5a58a0a10f506edbcbd2c669791a34643f92c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 31 Aug 2018 14:44:45 -0400 Subject: [PATCH 007/114] #4410 file code delete --- .../edu/harvard/iq/dataverse/DatasetPage.java | 39 -------------- .../harvard/iq/dataverse/DataversePage.java | 53 +++++++++++++++++++ 2 files changed, 53 insertions(+), 39 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 6218afe5f49..688e2276ded 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -750,45 +750,6 @@ public void updateReleasedVersions(){ } - public void updateLinkableDataverses() { - dataversesForLinking = new ArrayList<>(); - linkingDVSelectItems = new ArrayList<>(); - - //Since this is a 
super user we are getting all dataverses - dataversesForLinking = dataverseService.findAll(); - if (dataversesForLinking.isEmpty()) { - setNoDVsAtAll(true); - return; - } - - dataversesForLinking.remove(dataset.getOwner()); - Dataverse testDV = dataset.getOwner(); - while(testDV.getOwner() != null){ - dataversesForLinking.remove(testDV.getOwner()); - testDV = testDV.getOwner(); - } - - for (Dataverse removeLinked : dsLinkingService.findLinkingDataverses(dataset.getId())) { - dataversesForLinking.remove(removeLinked); - } - for (Dataverse removeLinked : dvLinkingService.findLinkingDataverses(dataset.getOwner().getId())) { - dataversesForLinking.remove(removeLinked); - } - - if (dataversesForLinking.isEmpty()) { - setNoDVsRemaining(true); - return; - } - - for (Dataverse selectDV : dataversesForLinking) { - linkingDVSelectItems.add(new SelectItem(selectDV.getId(), selectDV.getDisplayName())); - } - - if (!dataversesForLinking.isEmpty() && dataversesForLinking.size() == 1 && dataversesForLinking.get(0) != null) { - linkingDataverse = dataversesForLinking.get(0); - linkingDataverseId = linkingDataverse.getId(); - } - } public void updateSelectedLinkingDV(ValueChangeEvent event) { linkingDataverseId = (Long) event.getNewValue(); diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index b0b217313b5..3f8a7347cd4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -181,6 +181,59 @@ public LinkMode getLinkMode() { public void setLinkMode(LinkMode linkMode) { this.linkMode = linkMode; } + + public void setupLinkingPopup (String popupSetting){ + if (popupSetting.equals("link")){ + setLinkMode(LinkMode.LINKDATAVERSE); + } else { + setLinkMode(LinkMode.SAVEDSEARCH); + } + updateLinkableDataverses(); + } + + public void updateLinkableDataverses() { + dataversesForLinking = new ArrayList<>(); + linkingDVSelectItems = new ArrayList<>(); + + //Since only a super user function add all dvs + dataversesForLinking = dataverseService.findAll();// permissionService.getDataversesUserHasPermissionOn(session.getUser(), Permission.PublishDataverse); + + /* + List roles = dataverseRoleServiceBean.getDataverseRolesByPermission(Permission.PublishDataverse, dataverse.getId()); + List types = new ArrayList(); + types.add("Dataverse"); + for (Long dvIdAsInt : permissionService.getDvObjectIdsUserHasRoleOn(session.getUser(), roles, types, false)) { + dataversesForLinking.add(dataverseService.find(dvIdAsInt)); + }*/ + + //for linking - make sure the link hasn't occurred and its not int the tree + if (this.linkMode.equals(LinkMode.LINKDATAVERSE)) { + + // remove this and it's parent tree + dataversesForLinking.remove(dataverse); + Dataverse testDV = dataverse; + while(testDV.getOwner() != null){ + dataversesForLinking.remove(testDV.getOwner()); + testDV = testDV.getOwner(); + } + + for (Dataverse removeLinked : linkingService.findLinkingDataverses(dataverse.getId())) { + dataversesForLinking.remove(removeLinked); + } + } else{ + //for saved search add all + + } + + for (Dataverse selectDV : dataversesForLinking) { + linkingDVSelectItems.add(new SelectItem(selectDV.getId(), selectDV.getDisplayName())); + } + + if (!dataversesForLinking.isEmpty() && dataversesForLinking.size() == 1 && dataversesForLinking.get(0) != null) { + linkingDataverse = dataversesForLinking.get(0); + linkingDataverseId = linkingDataverse.getId(); + } + } public void 
updateSelectedLinkingDV(ValueChangeEvent event) { linkingDataverseId = (Long) event.getNewValue(); From 2f99f60f641343891cfe072538e9eab1abada57c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 31 Aug 2018 15:14:21 -0400 Subject: [PATCH 008/114] #4410 fix error messages --- src/main/java/Bundle.properties | 2 +- .../edu/harvard/iq/dataverse/DatasetPage.java | 3 ++- src/main/webapp/dataset.xhtml | 23 +------------------ 3 files changed, 4 insertions(+), 24 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 31d4d44c66b..2ab89002a29 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1247,7 +1247,7 @@ dataset.message.locked.editNotAllowed=Dataset cannot be edited due to dataset lo dataset.message.createSuccess=This dataset has been created dataset.message.createSuccess.failedToSaveFiles=Partial Success: The dataset has been created. But the file(s) could not be saved. Please try uploading the file(s) again. dataset.message.createSuccess.partialSuccessSavingFiles=Partial Success: The dataset has been created. But only {0} out of {1} files have been saved. Please try uploading the missing file(s) again. -dataset.message.linkSuccess= {0} has been successfully linked to {1}. +dataset.message.linkSuccess= {0} has been successfully linked to {1}. If you need to remove this link in the future, please contact {2}. dataset.message.metadataSuccess=The metadata for this dataset has been updated. dataset.message.termsSuccess=The terms for this dataset has been updated. dataset.message.filesSuccess=The files for this dataset have been updated. diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 688e2276ded..40ec9c6d095 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2251,12 +2251,13 @@ private List getSuccessMessageArguments() { arguments.add(StringEscapeUtils.escapeHtml(dataset.getDisplayName())); String linkString = "" + StringEscapeUtils.escapeHtml(linkingDataverse.getDisplayName()) + ""; arguments.add(linkString); + arguments.add(settingsWrapper.getSupportTeamName()); return arguments; } public String saveLinkingDataverses(){ if (selectedDataversesForLinking == null || selectedDataversesForLinking.isEmpty()) { - JsfHelper.addFlashMessage(BundleUtil.getStringFromBundle("dataverse.link.select")); + JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataverse.link.select")); return ""; } for (Dataverse dv : selectedDataversesForLinking){ diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index a158ab77a56..d618fa5b343 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -186,7 +186,7 @@ #{bundle['link']} @@ -1269,13 +1269,6 @@ - - -

#{bundle['dataverse.link.no.choice']}

-
- -

#{bundle['dataverse.link.dataset.choose']}

-
- - - - - - - - -

#{bundle['dataverse.link.no.linkable']}

-
- -

#{bundle['dataverse.link.no.linkable.remaining']}

-
-
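For reference, the three positional placeholders in the updated dataset.message.linkSuccess entry from PATCH 008 are filled from the argument list built in getSuccessMessageArguments(); the substitution is assumed to behave like java.text.MessageFormat, roughly as in this stand-alone illustration (the literal values are only examples):

    String pattern = "{0} has been successfully linked to {1}. "
            + "If you need to remove this link in the future, please contact {2}.";
    // {0} = escaped dataset display name, {1} = linked dataverse rendered as an HTML link,
    // {2} = settingsWrapper.getSupportTeamName()
    String rendered = java.text.MessageFormat.format(pattern,
            "My Dataset", "My Dataverse", "Demo Support");
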
From 0e42fbbae70635db9070448238057cce983f582f Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 31 Aug 2018 15:29:59 -0400 Subject: [PATCH 009/114] #4410 return prompt for dataverse name --- src/main/webapp/dataset.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index d618fa5b343..60505c61724 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1278,7 +1278,7 @@ - +

#{bundle['dataverse.link.dataset.choose']}

Date: Thu, 6 Sep 2018 10:41:03 -0400 Subject: [PATCH 010/114] #4410 Fix none selected error; support multiple link success message --- src/main/java/Bundle.properties | 1 + .../edu/harvard/iq/dataverse/DatasetPage.java | 78 +++++++++++++++---- src/main/webapp/dataset.xhtml | 19 ++--- 3 files changed, 73 insertions(+), 25 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 2ab89002a29..12fbedc4c28 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1905,6 +1905,7 @@ dataverse.edit.msg=Edit Dataverse dataverse.edit.detailmsg= - Edit your dataverse and click Save. Asterisks indicate required fields. dataverse.feature.update=The featured dataverses for this dataverse have been updated. dataverse.link.select=You must select a linking dataverse. +dataset.noSelectedDataverse.header=Select Dataverse(s) dataverse.link.user=Only authenticated users can link a dataverse. dataverse.link.error=Unable to link {0} to {1}. An internal error occurred. dataverse.search.user=Only authenticated users can save a search. diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 40ec9c6d095..4cd597e2882 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -93,6 +93,7 @@ import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean; import edu.harvard.iq.dataverse.export.SchemaDotOrgExporter; import java.util.Collections; +import javax.faces.component.UIInput; import javax.faces.event.AjaxBehaviorEvent; import javax.servlet.ServletOutputStream; @@ -2249,25 +2250,75 @@ public void updateFileCounts(){ private List getSuccessMessageArguments() { List arguments = new ArrayList<>(); arguments.add(StringEscapeUtils.escapeHtml(dataset.getDisplayName())); - String linkString = "" + StringEscapeUtils.escapeHtml(linkingDataverse.getDisplayName()) + ""; - arguments.add(linkString); + + arguments.add(linkedDVSuccessMessage); arguments.add(settingsWrapper.getSupportTeamName()); return arguments; } + + private void addLinkedDVMessage(Dataverse linkingDataverse) { + + if (linkedDVSuccessMessage != null && !linkedDVSuccessMessage.isEmpty()) linkedDVSuccessMessage.concat(", "); + + linkedDVSuccessMessage+= " " + StringEscapeUtils.escapeHtml(linkingDataverse.getDisplayName()) + ""; + + } - public String saveLinkingDataverses(){ + public void saveLinkingDataverses(){ + if (selectedDataversesForLinking == null || selectedDataversesForLinking.isEmpty()) { - JsfHelper.addErrorMessage(BundleUtil.getStringFromBundle("dataverse.link.select")); - return ""; + + FacesContext.getCurrentInstance().addMessage(getSelectedDataverseMenu().getClientId(), + new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("dataverse.link.select"))); + return; } + boolean success = true; for (Dataverse dv : selectedDataversesForLinking){ - saveLink(dv); + success &= saveLink(dv); + } + + if(success){ + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.linkSuccess", getSuccessMessageArguments())); + } else{ + + FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), linkingDataverseErrorMessage); + FacesContext.getCurrentInstance().addMessage(null, message); } - return ""; } - private void saveLink(Dataverse dataverse){ - + private String linkingDataverseErrorMessage = ""; + private String linkedDVSuccessMessage = ""; + + 
public String getLinkingDataverseErrorMessage() { + return linkingDataverseErrorMessage; + } + + public void setLinkingDataverseErrorMessage(String linkingDataverseErrorMessage) { + this.linkingDataverseErrorMessage = linkingDataverseErrorMessage; + } + + public String getLinkedDVSuccessMessage() { + return linkedDVSuccessMessage; + } + + public void setLinkedDVSuccessMessage(String linkedDVSuccessMessage) { + this.linkedDVSuccessMessage = linkedDVSuccessMessage; + } + + + UIInput selectedLinkingDataverseMenu; + + public UIInput getSelectedDataverseMenu() { + return selectedLinkingDataverseMenu; + } + + public void setSelectedDataverseMenu(UIInput selectedDataverseMenu) { + this.selectedLinkingDataverseMenu = selectedDataverseMenu; + } + + + private Boolean saveLink(Dataverse dataverse){ + boolean retVal = true; if (readOnly) { // Pass a "real", non-readonly dataset the the LinkDatasetCommand: dataset = datasetService.find(dataset.getId()); @@ -2276,7 +2327,8 @@ private void saveLink(Dataverse dataverse){ linkingDataverse = dataverse; try { commandEngine.submit(cmd); - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.linkSuccess", getSuccessMessageArguments())); + addLinkedDVMessage(linkingDataverse); + } catch (CommandException ex) { String msg = "There was a problem linking this dataset to yours: " + ex; logger.severe(msg); @@ -2284,10 +2336,10 @@ private void saveLink(Dataverse dataverse){ /** * @todo how do we get this message to show up in the GUI? */ - FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), msg); - FacesContext.getCurrentInstance().addMessage(null, message); + linkingDataverseErrorMessage = msg; + retVal = false; } - + return retVal; } diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 60505c61724..e9284be1896 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -998,7 +998,7 @@
- +

#{bundle['dataset.noSelectedFilesForDownload']}

@@ -1281,10 +1281,11 @@

#{bundle['dataverse.link.dataset.choose']}

@@ -1298,11 +1299,7 @@ - - - -

#{DatasetPage.linkingDataverse.displayName}

-
+
@@ -1311,17 +1308,15 @@
-
- - - +
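One detail in the addLinkedDVMessage() helper introduced in this patch: String.concat() returns a new string and leaves linkedDVSuccessMessage unchanged, so the ", " separator between multiple linked dataverse names is silently dropped. A sketch of the presumably intended accumulation:

    if (linkedDVSuccessMessage != null && !linkedDVSuccessMessage.isEmpty()) {
        linkedDVSuccessMessage += ", "; // keep the result; concat() alone discards it
    }
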
From ae31a4dadb370f811e562f180906b77759aba8e3 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 7 Sep 2018 10:22:23 -0400 Subject: [PATCH 011/114] #4410 code cleanup add msgs to bundle --- src/main/java/Bundle.properties | 2 + .../edu/harvard/iq/dataverse/DatasetPage.java | 62 ++++++++----------- .../iq/dataverse/DataverseServiceBean.java | 16 +---- .../command/impl/LinkDatasetCommand.java | 16 ++--- src/main/webapp/dataset.xhtml | 12 +--- 5 files changed, 38 insertions(+), 70 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 12fbedc4c28..b62293d7717 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -682,6 +682,8 @@ dataverse.link.yourDataverses=Your Dataverse dataverse.link.yourDataverses.inputPlaceholder=Enter Dataverse Name dataverse.link.save=Save Linked Dataverse dataset.link.save=Save Linked Dataset +dataset.link.not.to.owner=Can't link a dataset to its dataverse +dataset.link.not.to.parent.dataverse=Can't link a dataset to its parent dataverses dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to. dataverse.link.dataset.choose=Enter the name of the dataverse you would like to link this dataset to. dataverse.link.dataset.none=No linkable dataverses available. diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 4cd597e2882..7937a7af007 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -239,7 +239,6 @@ public enum DisplayMode { private boolean stateChanged = false; - private List dataversesForLinking = new ArrayList<>(); private Long linkingDataverseId; private List linkingDVSelectItems; private Dataverse linkingDataverse; @@ -737,13 +736,7 @@ public void setLinkingDataverseId(Long linkingDataverseId) { this.linkingDataverseId = linkingDataverseId; } - public List getDataversesForLinking() { - return dataversesForLinking; - } - public void setDataversesForLinking(List dataversesForLinking) { - this.dataversesForLinking = dataversesForLinking; - } public void updateReleasedVersions(){ @@ -2249,29 +2242,38 @@ public void updateFileCounts(){ private List getSuccessMessageArguments() { List arguments = new ArrayList<>(); + String dataverseString = ""; arguments.add(StringEscapeUtils.escapeHtml(dataset.getDisplayName())); - - arguments.add(linkedDVSuccessMessage); + for (Dataverse dv: selectedDataversesForLinking ){ + dataverseString += " " + StringEscapeUtils.escapeHtml(dv.getDisplayName()) + ""; + } + arguments.add(dataverseString); arguments.add(settingsWrapper.getSupportTeamName()); return arguments; } - private void addLinkedDVMessage(Dataverse linkingDataverse) { - - if (linkedDVSuccessMessage != null && !linkedDVSuccessMessage.isEmpty()) linkedDVSuccessMessage.concat(", "); - - linkedDVSuccessMessage+= " " + StringEscapeUtils.escapeHtml(linkingDataverse.getDisplayName()) + ""; - - } - public void saveLinkingDataverses(){ + public void saveLinkingDataverses() { if (selectedDataversesForLinking == null || selectedDataversesForLinking.isEmpty()) { - - FacesContext.getCurrentInstance().addMessage(getSelectedDataverseMenu().getClientId(), - new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("dataverse.link.select"))); + FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_ERROR, "", BundleUtil.getStringFromBundle("dataverse.link.select")); + 
FacesContext.getCurrentInstance().addMessage(null, message); return; } + + for (Dataverse dv : selectedDataversesForLinking) { + if (dataset.getOwner().equals(dv)) { + FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), BundleUtil.getStringFromBundle("dataset.link.not.to.owner")); + FacesContext.getCurrentInstance().addMessage(null, message); + return; + } + if (dataset.getOwner().getOwners().contains(dv)) { + FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), BundleUtil.getStringFromBundle("dataset.link.not.to.parent.dataverse")); + FacesContext.getCurrentInstance().addMessage(null, message); + return; + } + } + boolean success = true; for (Dataverse dv : selectedDataversesForLinking){ success &= saveLink(dv); @@ -2279,15 +2281,14 @@ public void saveLinkingDataverses(){ if(success){ JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.linkSuccess", getSuccessMessageArguments())); - } else{ - + } else{ FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), linkingDataverseErrorMessage); FacesContext.getCurrentInstance().addMessage(null, message); } } private String linkingDataverseErrorMessage = ""; - private String linkedDVSuccessMessage = ""; + public String getLinkingDataverseErrorMessage() { return linkingDataverseErrorMessage; @@ -2296,15 +2297,6 @@ public String getLinkingDataverseErrorMessage() { public void setLinkingDataverseErrorMessage(String linkingDataverseErrorMessage) { this.linkingDataverseErrorMessage = linkingDataverseErrorMessage; } - - public String getLinkedDVSuccessMessage() { - return linkedDVSuccessMessage; - } - - public void setLinkedDVSuccessMessage(String linkedDVSuccessMessage) { - this.linkedDVSuccessMessage = linkedDVSuccessMessage; - } - UIInput selectedLinkingDataverseMenu; @@ -2326,9 +2318,7 @@ private Boolean saveLink(Dataverse dataverse){ LinkDatasetCommand cmd = new LinkDatasetCommand(dvRequestService.getDataverseRequest(), dataverse, dataset); linkingDataverse = dataverse; try { - commandEngine.submit(cmd); - addLinkedDVMessage(linkingDataverse); - + commandEngine.submit(cmd); } catch (CommandException ex) { String msg = "There was a problem linking this dataset to yours: " + ex; logger.severe(msg); @@ -2346,7 +2336,7 @@ private Boolean saveLink(Dataverse dataverse){ public List completeLinkingDataverse(String query) { dataset = datasetService.find(dataset.getId()); if (session.getUser().isAuthenticated()) { - return dataverseService.filterDataversesForLinking(query, (AuthenticatedUser) session.getUser(), dataset); + return dataverseService.filterDataversesForLinking(query, dvRequestService.getDataverseRequest(), dataset); } else { return null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 370cd206169..3f49019318e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -6,12 +6,10 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.authorization.groups.Group; -import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; -import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; import 
edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.search.SolrSearchResult; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -27,7 +25,6 @@ import java.util.ResourceBundle; import java.util.MissingResourceException; import java.util.Properties; -import java.util.Set; import java.util.concurrent.Future; import java.util.jar.Attributes; import java.util.jar.Manifest; @@ -40,7 +37,6 @@ import javax.persistence.NonUniqueResultException; import javax.persistence.PersistenceContext; import javax.persistence.TypedQuery; -import org.apache.commons.lang.StringUtils; /** * @@ -57,12 +53,6 @@ public class DataverseServiceBean implements java.io.Serializable { @EJB DatasetServiceBean datasetService; - @EJB - GroupServiceBean groupSvc; - - @EJB - ExplicitGroupServiceBean explicitGroupSvc; - @EJB DataverseLinkingServiceBean dataverseLinkingService; @@ -457,7 +447,7 @@ public List filterByAliasQuery(String filterQuery) { return ret; } - public List filterDataversesForLinking(String query, AuthenticatedUser user, Dataset dataset) { + public List filterDataversesForLinking(String query, DataverseRequest req, Dataset dataset) { List dataverseList = new ArrayList<>(); @@ -476,7 +466,7 @@ public List filterDataversesForLinking(String query, AuthenticatedUse for (Dataverse res : results) { if (!remove.contains(res)) { - if (this.permissionService.userOn(user, res).has(Permission.PublishDataverse)) { + if (this.permissionService.requestOn(req, res).has(Permission.PublishDataverse)) { dataverseList.add(res); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java index 7c006aa4e64..96e0e9e0966 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java @@ -9,16 +9,14 @@ import edu.harvard.iq.dataverse.DatasetLinkingDataverse; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; -import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import edu.harvard.iq.dataverse.util.BundleUtil; import java.sql.Timestamp; -import java.util.Collections; import java.util.Date; /** @@ -39,16 +37,12 @@ public LinkDatasetCommand(DataverseRequest aRequest, Dataverse dataverse, Datase @Override public DatasetLinkingDataverse execute(CommandContext ctxt) throws CommandException { - /* - if ((!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser())) { - throw new PermissionException("Link Dataset can only be called by superusers.", - this, Collections.singleton(Permission.PublishDataverse), linkingDataverse); - }*/ - if 
(linkedDataset.getOwner().equals(linkingDataverse)) { - throw new IllegalCommandException("Can't link a dataset to its dataverse", this); + + if (linkedDataset.getOwner().equals(linkingDataverse)) { + throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.link.not.to.owner"), this); } if (linkedDataset.getOwner().getOwners().contains(linkingDataverse)) { - throw new IllegalCommandException("Can't link a dataset to its parent dataverses", this); + throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.link.not.to.parent.dataverse"), this); } DatasetLinkingDataverse datasetLinkingDataverse = new DatasetLinkingDataverse(); diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index e9284be1896..6c058be1bda 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -185,8 +185,7 @@ #{bundle['link']} @@ -1267,17 +1266,13 @@ - -
- - - +

#{bundle['dataverse.link.dataset.choose']}

- - -
Date: Fri, 7 Sep 2018 15:56:05 -0400 Subject: [PATCH 012/114] #4410 limit linking to one dv at a time modify perms --- .../edu/harvard/iq/dataverse/DatasetPage.java | 38 +++++-------------- .../iq/dataverse/DataverseServiceBean.java | 4 +- .../command/impl/LinkDatasetCommand.java | 2 +- src/main/webapp/dataset.xhtml | 10 ++--- 4 files changed, 16 insertions(+), 38 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 7937a7af007..a81a4388c84 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2069,14 +2069,14 @@ public void setSelectedFiles(List selectedFiles) { this.selectedFiles = selectedFiles; } - private List selectedDataversesForLinking = new ArrayList<>(); + private Dataverse selectedDataverseForLinking; - public List getSelectedDataversesForLinking() { - return selectedDataversesForLinking; + public Dataverse getSelectedDataverseForLinking() { + return selectedDataverseForLinking; } - public void setSelectedDataversesForLinking(List selectedDataversesForLinking) { - this.selectedDataversesForLinking = selectedDataversesForLinking; + public void setSelectedDataverseForLinking(Dataverse sdvfl) { + this.selectedDataverseForLinking = sdvfl; } private List selectedRestrictedFiles; // = new ArrayList<>(); @@ -2244,9 +2244,7 @@ private List getSuccessMessageArguments() { List arguments = new ArrayList<>(); String dataverseString = ""; arguments.add(StringEscapeUtils.escapeHtml(dataset.getDisplayName())); - for (Dataverse dv: selectedDataversesForLinking ){ - dataverseString += " " + StringEscapeUtils.escapeHtml(dv.getDisplayName()) + ""; - } + dataverseString += " " + StringEscapeUtils.escapeHtml(selectedDataverseForLinking.getDisplayName()) + ""; arguments.add(dataverseString); arguments.add(settingsWrapper.getSupportTeamName()); return arguments; @@ -2255,31 +2253,13 @@ private List getSuccessMessageArguments() { public void saveLinkingDataverses() { - if (selectedDataversesForLinking == null || selectedDataversesForLinking.isEmpty()) { + if (selectedDataverseForLinking == null) { FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_ERROR, "", BundleUtil.getStringFromBundle("dataverse.link.select")); FacesContext.getCurrentInstance().addMessage(null, message); return; - } + } - for (Dataverse dv : selectedDataversesForLinking) { - if (dataset.getOwner().equals(dv)) { - FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), BundleUtil.getStringFromBundle("dataset.link.not.to.owner")); - FacesContext.getCurrentInstance().addMessage(null, message); - return; - } - if (dataset.getOwner().getOwners().contains(dv)) { - FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), BundleUtil.getStringFromBundle("dataset.link.not.to.parent.dataverse")); - FacesContext.getCurrentInstance().addMessage(null, message); - return; - } - } - - boolean success = true; - for (Dataverse dv : selectedDataversesForLinking){ - success &= saveLink(dv); - } - - if(success){ + if(saveLink(selectedDataverseForLinking)){ JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.linkSuccess", getSuccessMessageArguments())); } else{ FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), linkingDataverseErrorMessage); diff --git 
a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 3f49019318e..23f677fcd44 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -185,7 +185,7 @@ public Dataverse findByAlias(String anAlias) { return null; } } - + public boolean hasData( Dataverse dv ) { TypedQuery amountQry = em.createNamedQuery("Dataverse.ownedObjectsById", Long.class) .setParameter("id", dv.getId()); @@ -466,7 +466,7 @@ public List filterDataversesForLinking(String query, DataverseRequest for (Dataverse res : results) { if (!remove.contains(res)) { - if (this.permissionService.requestOn(req, res).has(Permission.PublishDataverse)) { + if (this.permissionService.requestOn(req, res).has(Permission.PublishDataset)) { dataverseList.add(res); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java index 96e0e9e0966..e3cfb1ad421 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java @@ -23,7 +23,7 @@ * * @author skraffmiller */ -@RequiredPermissions(Permission.PublishDataverse) +@RequiredPermissions(Permission.PublishDataset) public class LinkDatasetCommand extends AbstractCommand { private final Dataset linkedDataset; diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 6c058be1bda..587c8ae9c6b 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1279,17 +1279,15 @@ binding="#{DatasetPage.selectedDataverseMenu}" placeholder="#{bundle['dataverse.link.yourDataverses.inputPlaceholder']}" emptyMessage="#{bundle['dataverse.link.dataset.none']}" - multiple="true" scrollHeight="180" forceSelection="true" - minQueryLength="3" queryDelay="1000" - value="#{DatasetPage.selectedDataversesForLinking}" + scrollHeight="180" forceSelection="true" + minQueryLength="1" queryDelay="1000" + value="#{DatasetPage.selectedDataverseForLinking}" + multiple="false" completeMethod="#{DatasetPage.completeLinkingDataverse}" var="dataverseLk" itemLabel="#{dataverseLk.displayName}" itemValue="#{dataverseLk}" converter="dataverseConverter"> - - - From 13abdb62d48a51766f3706debb263238de26f6f6 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 7 Sep 2018 16:16:15 -0400 Subject: [PATCH 013/114] #4410 add "alias" to dv linking selector --- src/main/webapp/dataset.xhtml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 587c8ae9c6b..52f1e7c3f0c 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1288,6 +1288,9 @@ + + + From 190fd653f99839d6ffefcdae0188a97ba39a422e Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 10 Sep 2018 13:19:10 -0400 Subject: [PATCH 014/114] #4410 Add Dataverse to Applies to for publish Dataset perm --- .../java/edu/harvard/iq/dataverse/authorization/Permission.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/Permission.java b/src/main/java/edu/harvard/iq/dataverse/authorization/Permission.java index c7651681e19..5337d02d559 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/Permission.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/authorization/Permission.java @@ -47,7 +47,7 @@ public enum Permission implements java.io.Serializable { ManageDataversePermissions(BundleUtil.getStringFromBundle("permission.managePermissionsDataverse"), true, Dataverse.class), ManageDatasetPermissions(BundleUtil.getStringFromBundle("permission.managePermissionsDataset"), true, Dataset.class), PublishDataverse(BundleUtil.getStringFromBundle("permission.publishDataverse"), true, Dataverse.class), - PublishDataset(BundleUtil.getStringFromBundle("permission.publishDataset"), true, Dataset.class), + PublishDataset(BundleUtil.getStringFromBundle("permission.publishDataset"), true, Dataset.class, Dataverse.class), // Delete DeleteDataverse(BundleUtil.getStringFromBundle("permission.deleteDataverse"), true, Dataverse.class), DeleteDatasetDraft(BundleUtil.getStringFromBundle("permission.deleteDataset"), true, Dataset.class); From 780b89e88cdcf21ff151be9f27485b7b68e13fab Mon Sep 17 00:00:00 2001 From: matthew-a-dunlap Date: Mon, 10 Sep 2018 17:53:35 -0400 Subject: [PATCH 015/114] 4990 first past instance creation - Does not take in a branch name or propigate that to the instance - Needs more thinking on pem creation - Needs another script or other ideas around tearing down instances --- scripts/installer/ec2.sh | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100755 scripts/installer/ec2.sh diff --git a/scripts/installer/ec2.sh b/scripts/installer/ec2.sh new file mode 100755 index 00000000000..c2b8ced0919 --- /dev/null +++ b/scripts/installer/ec2.sh @@ -0,0 +1,24 @@ +#Refering to this doc: https://docs.aws.amazon.com/cli/latest/userguide/tutorial-ec2-ubuntu.html + +#This needs to take in an argument of the branch name + +#Create security group if it doesn't already exist +echo "*Creating security group" +aws ec2 create-security-group --group-name devenv-sg --description "security group for development environment" +aws ec2 authorize-security-group-ingress --group-name devenv-sg --protocol tcp --port 22 --cidr 0.0.0.0/0 +echo "*End creating security group" + +#Create key pair. Does this pem need to be saved or just held temporarilly? +# - Probably held, we probably need another script to blow away our spinned-up ec2 instance +# - Should attach the branch name to the key +echo "*Creating key pair" +aws ec2 create-key-pair --key-name devenv-key --query 'KeyMaterial' --output text > devenv-key.pem +chmod 400 devenv-key.pem +echo "*End creating key pair" + +#AMI ID acquired by this (very slow) query Sept 10th 2018 +#aws ec2 describe-images --owners 'aws-marketplace' --filters 'Name=product-code,Values=aw0evgkw8e5c1q413zgy5pjce' --query 'sort_by(Images, &CreationDate)[-1].[ImageId]' --output 'text' + +echo "*Creating ec2 instance" +aws ec2 run-instances --image-id ami-9887c6e7 --security-groups devenv-sg --count 1 --instance-type t2.micro --key-name devenv-key --query 'Instances[0].InstanceId' +echo "*End creating EC2 instance" \ No newline at end of file From be4637d112b55711f4ab69f6fd7b5a3f9dc5d5f6 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 10 Sep 2018 19:49:56 -0400 Subject: [PATCH 016/114] added checks for the exit status of the first pass of the postgres configuration setup script. 
(#4966) --- scripts/installer/install | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/scripts/installer/install b/scripts/installer/install index ce69caa52f1..32d41cddd6d 100755 --- a/scripts/installer/install +++ b/scripts/installer/install @@ -542,6 +542,12 @@ my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; my $done = &setup(); +unless ($done == 1) +{ + print STDERR "Postgres setup has failed!\n"; + print STDERR "Aborting - sorry...\n\n"; + exit 1; +} # 7. PRE-POPULATE THE DATABASE: # (in this step some pre-supplied content is inserted into the database that we have just created; @@ -569,6 +575,8 @@ unless (( print STDERR "It appears that the Dataverse application is not running...\n"; print STDERR "Even though the \"asadmin deploy\" command had succeeded earlier.\n\n"; print STDERR "Aborting - sorry...\n\n"; + + exit 1; } } @@ -750,7 +758,8 @@ sub setup { $ENV{'WARFILE_LOCATION'} = $WARFILE_LOCATION; - #start-glassfish is a container name for OpenShift deployment + #start-glassfish is a container name for OpenShift deployment + my $postgres_setup_status; if ( exists($ENV{'MY_POD_NAME'}) ) { # assume that the second postgres-setup call is sufficient outside OpenShift @@ -758,12 +767,20 @@ sub setup { unless($ENV{"MY_POD_NAME"} eq "start-glassfish" || $ENV{"MY_POD_NAME"} eq "dataverse-glassfish-0") { system("perl ./postgres-setup"); } + $postgres_setup_status = $? } else { - system("perl ./postgres-setup"); + system("perl ./postgres-setup"); + $postgres_setup_status = $? } + if ($postgres_setup_status != undef && $postgres_setup_status != 0) + { + print "First postgres configuration pass failed!\n"; + return $failure; + } + print "Finished configuring postgres \n"; #Start and Setup Glassfish + Deploy war file @@ -777,7 +794,8 @@ sub setup { system("perl ./glassfish-startup -y"); } - print "Finished configuring postgres \n"; + # TODO: check the exit code of the script above! + print "Finished configuring Glassfish and deploying the dataverse application. \n"; #Pre-populate the database $ENV{'PREPOPULATE'} = 1; From 884ae9056bfc676336580c3e1fb1d42c92455f5e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 11 Sep 2018 08:57:10 -0400 Subject: [PATCH 017/114] check for existence of security group before creating it #4990 --- scripts/installer/ec2.sh | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/scripts/installer/ec2.sh b/scripts/installer/ec2.sh index c2b8ced0919..3fc8810b630 100755 --- a/scripts/installer/ec2.sh +++ b/scripts/installer/ec2.sh @@ -3,10 +3,16 @@ #This needs to take in an argument of the branch name #Create security group if it doesn't already exist -echo "*Creating security group" -aws ec2 create-security-group --group-name devenv-sg --description "security group for development environment" -aws ec2 authorize-security-group-ingress --group-name devenv-sg --protocol tcp --port 22 --cidr 0.0.0.0/0 -echo "*End creating security group" +echo "*Checking for existing security group" +GROUP_CHECK=$(aws ec2 describe-security-groups --group-name devenv-sg) +if [[ "$?" -ne 0 ]]; then + echo "*Creating security group" + aws ec2 create-security-group --group-name devenv-sg --description "security group for development environment" + aws ec2 authorize-security-group-ingress --group-name devenv-sg --protocol tcp --port 22 --cidr 0.0.0.0/0 + echo "*End creating security group" +else + echo "*Security group already exists." +fi #Create key pair. 
Does this pem need to be saved or just held temporarilly? # - Probably held, we probably need another script to blow away our spinned-up ec2 instance @@ -21,4 +27,4 @@ echo "*End creating key pair" echo "*Creating ec2 instance" aws ec2 run-instances --image-id ami-9887c6e7 --security-groups devenv-sg --count 1 --instance-type t2.micro --key-name devenv-key --query 'Instances[0].InstanceId' -echo "*End creating EC2 instance" \ No newline at end of file +echo "*End creating EC2 instance" From 8db31c9016ceab2c382bd4c6933aab6b07ba4b44 Mon Sep 17 00:00:00 2001 From: matthew-a-dunlap Date: Tue, 11 Sep 2018 18:13:25 -0400 Subject: [PATCH 018/114] 4990 working script first pass Script now fufills the need of creating an ec2 instance and scping in a file that has the branch name to be spun up later by ansible. This could probably be used as is, but I want to clean it up and create a tear-down script or think of some other ways we will ensure that a billion instances haven't been created. --- scripts/installer/ec2.sh | 63 +++++++++++++++++++++++++++++++++++----- 1 file changed, 56 insertions(+), 7 deletions(-) diff --git a/scripts/installer/ec2.sh b/scripts/installer/ec2.sh index 3fc8810b630..f362cdff732 100755 --- a/scripts/installer/ec2.sh +++ b/scripts/installer/ec2.sh @@ -1,6 +1,18 @@ -#Refering to this doc: https://docs.aws.amazon.com/cli/latest/userguide/tutorial-ec2-ubuntu.html +#!/bin/bash +#Initially Referred to this doc: https://docs.aws.amazon.com/cli/latest/userguide/tutorial-ec2-ubuntu.html -#This needs to take in an argument of the branch name +DEPLOY_FILE=dataverse_deploy_info.txt + +if [ "$1" = "" ]; then + echo "No branch name provided" + exit 1 +else + BRANCH_NAME=$1 + if [[ $(git ls-remote --heads https://github.com/IQSS/dataverse.git $BRANCH_NAME | wc -l) -eq 0 ]]; then + echo "Branch does not exist on the Dataverse github repo" + exit 1 + fi +fi #Create security group if it doesn't already exist echo "*Checking for existing security group" @@ -17,14 +29,51 @@ fi #Create key pair. Does this pem need to be saved or just held temporarilly? # - Probably held, we probably need another script to blow away our spinned-up ec2 instance # - Should attach the branch name to the key -echo "*Creating key pair" -aws ec2 create-key-pair --key-name devenv-key --query 'KeyMaterial' --output text > devenv-key.pem -chmod 400 devenv-key.pem -echo "*End creating key pair" +echo "*Checking for existing key pair" +if ! [ -f devenv-key.pem ]; then + echo "*Creating key pair" + PRIVATE_KEY=$(aws ec2 create-key-pair --key-name devenv-key --query 'KeyMaterial' --output text) + if [[ $PRIVATE_KEY = '-----BEGIN RSA PRIVATE KEY-----'* ]]; then + printf -- "$PRIVATE_KEY">devenv-key.pem + chmod 400 devenv-key.pem + echo "*New key pair created" + fi + echo "*End creating key pair" +else + echo "*Key pair alraedy exists." 
+fi #AMI ID acquired by this (very slow) query Sept 10th 2018 +#This does not need to be run every time, leaving it in here so it is remembered #aws ec2 describe-images --owners 'aws-marketplace' --filters 'Name=product-code,Values=aw0evgkw8e5c1q413zgy5pjce' --query 'sort_by(Images, &CreationDate)[-1].[ImageId]' --output 'text' +#The AMI ID only works for region us-east-1, for now just forcing that +#Using this image ID a 1-time requires subscription per root account, which was done through the UI echo "*Creating ec2 instance" -aws ec2 run-instances --image-id ami-9887c6e7 --security-groups devenv-sg --count 1 --instance-type t2.micro --key-name devenv-key --query 'Instances[0].InstanceId' +#INSTACE_ID=$(aws ec2 run-instances --image-id ami-9887c6e7 --security-groups devenv-sg --count 1 --instance-type t2.nano --key-name devenv-key --query 'Instances[0].InstanceId' --block-device-mappings file://ec2-device-mapping.json | tr -d \") +INSTACE_ID=$(aws ec2 run-instances --image-id ami-9887c6e7 --security-groups devenv-sg --count 1 --instance-type t2.nano --key-name devenv-key --query 'Instances[0].InstanceId' --block-device-mappings '[ { "DeviceName": "/dev/sda1", "Ebs": { "DeleteOnTermination": true } } ]' | tr -d \") +echo "Instance ID: "$INSTACE_ID echo "*End creating EC2 instance" + +PUBLIC_DNS=$(aws ec2 describe-instances --instance-ids $INSTACE_ID --query "Reservations[*].Instances[*].[PublicDnsName]" --output text) + +echo $BRANCH_NAME > $DEPLOY_FILE +echo "Connecting to the instance. This may take a minute as it is being spun up" +#MAD: I'm a bit confused, this says its adding it to a file even though I don't think it should. At least its passing without me pressing enter +scp -i devenv-key.pem -o 'StrictHostKeyChecking no' -o 'UserKnownHostsFile=/dev/null' $DEPLOY_FILE centos@${PUBLIC_DNS}:~ +rm -rf $DEPLOY_FILE + +echo "New EC2 instance created at $PUBLIC_DNS" + +#ssh -i devenv-key.pem centos@$PUBLIC_DNS + +#PUBLIC_IP=$() + +#echo $PUBLIC_IP + +#Outstanding needs: +# - Delete Script +# - Correct ec2 specs for our needs +# - better error handling +# - maybe less verbose messaging? +# - force region? --region us-east-1 \ No newline at end of file From 9d04a2bbcfd3d10d2e42675c24ba99d544f11e67 Mon Sep 17 00:00:00 2001 From: matthew-a-dunlap Date: Tue, 11 Sep 2018 18:37:00 -0400 Subject: [PATCH 019/114] #4990 clean up create and delete all PoC --- .../installer/{ec2.sh => ec2-create-instance.sh} | 14 ++------------ scripts/installer/ec2-destroy-all.sh | 11 +++++++++++ 2 files changed, 13 insertions(+), 12 deletions(-) rename scripts/installer/{ec2.sh => ec2-create-instance.sh} (83%) create mode 100755 scripts/installer/ec2-destroy-all.sh diff --git a/scripts/installer/ec2.sh b/scripts/installer/ec2-create-instance.sh similarity index 83% rename from scripts/installer/ec2.sh rename to scripts/installer/ec2-create-instance.sh index f362cdff732..fdd614c634f 100755 --- a/scripts/installer/ec2.sh +++ b/scripts/installer/ec2-create-instance.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/bin/bash -x #Initially Referred to this doc: https://docs.aws.amazon.com/cli/latest/userguide/tutorial-ec2-ubuntu.html DEPLOY_FILE=dataverse_deploy_info.txt @@ -26,9 +26,6 @@ else echo "*Security group already exists." fi -#Create key pair. Does this pem need to be saved or just held temporarilly? -# - Probably held, we probably need another script to blow away our spinned-up ec2 instance -# - Should attach the branch name to the key echo "*Checking for existing key pair" if ! 
[ -f devenv-key.pem ]; then echo "*Creating key pair" @@ -43,14 +40,13 @@ else echo "*Key pair alraedy exists." fi -#AMI ID acquired by this (very slow) query Sept 10th 2018 +#AMI ID for centos7 acquired by this (very slow) query Sept 10th 2018 #This does not need to be run every time, leaving it in here so it is remembered #aws ec2 describe-images --owners 'aws-marketplace' --filters 'Name=product-code,Values=aw0evgkw8e5c1q413zgy5pjce' --query 'sort_by(Images, &CreationDate)[-1].[ImageId]' --output 'text' #The AMI ID only works for region us-east-1, for now just forcing that #Using this image ID a 1-time requires subscription per root account, which was done through the UI echo "*Creating ec2 instance" -#INSTACE_ID=$(aws ec2 run-instances --image-id ami-9887c6e7 --security-groups devenv-sg --count 1 --instance-type t2.nano --key-name devenv-key --query 'Instances[0].InstanceId' --block-device-mappings file://ec2-device-mapping.json | tr -d \") INSTACE_ID=$(aws ec2 run-instances --image-id ami-9887c6e7 --security-groups devenv-sg --count 1 --instance-type t2.nano --key-name devenv-key --query 'Instances[0].InstanceId' --block-device-mappings '[ { "DeviceName": "/dev/sda1", "Ebs": { "DeleteOnTermination": true } } ]' | tr -d \") echo "Instance ID: "$INSTACE_ID echo "*End creating EC2 instance" @@ -65,12 +61,6 @@ rm -rf $DEPLOY_FILE echo "New EC2 instance created at $PUBLIC_DNS" -#ssh -i devenv-key.pem centos@$PUBLIC_DNS - -#PUBLIC_IP=$() - -#echo $PUBLIC_IP - #Outstanding needs: # - Delete Script # - Correct ec2 specs for our needs diff --git a/scripts/installer/ec2-destroy-all.sh b/scripts/installer/ec2-destroy-all.sh new file mode 100755 index 00000000000..044d533b6b0 --- /dev/null +++ b/scripts/installer/ec2-destroy-all.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +#This script gets all the instances from ec2 and sends terminate to them +#Its pretty basic and probably shouldn't be trusted at this point. 
Namely: +# - You can kill instances other people are using +# - It will try to kill instances that are already dead, which makes output hard to read +# - If it fails for some reason it's hard to tell the script didn't work right + +INSTANCES=$(aws ec2 describe-instances --query 'Reservations[].Instances[].[InstanceId]' --output text) + +aws ec2 terminate-instances --instance-ids $INSTANCES \ No newline at end of file From 4c2665bce9bd35a2e31674aa821d92fa6c4a4350 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 12 Sep 2018 09:52:07 -0400 Subject: [PATCH 020/114] #4832 Alternative PIDs for hdl Datasets --- .../AbstractGlobalIdServiceBean.java | 2 +- .../AlternativePersistentIdentifier.java | 100 ++++++++++++++++++ .../iq/dataverse/DatasetServiceBean.java | 8 +- .../edu/harvard/iq/dataverse/DvObject.java | 8 ++ .../iq/dataverse/DvObjectServiceBean.java | 14 ++- .../edu/harvard/iq/dataverse/api/Admin.java | 26 +++++ .../command/impl/RegisterDvObjectCommand.java | 49 ++++++++- 7 files changed, 200 insertions(+), 7 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java diff --git a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java index 61e19d1221d..5f29eca3bb9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java @@ -70,7 +70,7 @@ protected Map addBasicMetadata(DvObject dvObjectIn, Map alternativePersistentIndentifiers; + /** * previewImageAvailable could also be thought of as "thumbnail has been * generated. However, were all three thumbnails generated? We might need a diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java index 9f011d276a3..f4422e1e474 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java @@ -62,18 +62,28 @@ public List findAll() { // FIXME This type-by-string has to go, in favor of passing a class parameter. public DvObject findByGlobalId(String globalIdString, String typeString) { + return findByGlobalId(globalIdString, typeString, false); + } + + // FIXME This type-by-string has to go, in favor of passing a class parameter. 
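The ec2-destroy-all.sh script added in PATCH 019 above terminates every instance in the account, and its own comments warn that this can kill instances other people are using. A less destructive teardown -- a sketch only, assuming all throwaway instances were launched with the devenv-key key pair that ec2-create-instance.sh creates -- could filter on that key name first:

    #!/bin/bash
    # Sketch: terminate only running instances launched with the devenv-key key pair.
    # Assumes the AWS CLI is configured for the same account/region as ec2-create-instance.sh.
    INSTANCES=$(aws ec2 describe-instances \
      --filters "Name=key-name,Values=devenv-key" "Name=instance-state-name,Values=running" \
      --query 'Reservations[].Instances[].InstanceId' --output text)
    if [ -n "$INSTANCES" ]; then
      aws ec2 terminate-instances --instance-ids $INSTANCES
    else
      echo "No running devenv-key instances found."
    fi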
+ public DvObject findByGlobalId(String globalIdString, String typeString, Boolean altId) { try { GlobalId gid = new GlobalId(globalIdString); DvObject foundDvObject = null; try { - Query query; - query = em.createNamedQuery("DvObject.findByGlobalId"); + Query query; + if (altId) { + query = em.createNamedQuery("DvObject.findByAlternativeGlobalId"); + } else{ + query = em.createNamedQuery("DvObject.findByGlobalId"); + } query.setParameter("identifier", gid.getIdentifier()); query.setParameter("protocol", gid.getProtocol()); query.setParameter("authority", gid.getAuthority()); query.setParameter("dtype", typeString); + System.out.print(query.toString()); foundDvObject = (DvObject) query.getSingleResult(); } catch (javax.persistence.NoResultException e) { // (set to .info, this can fill the log file with thousands of diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 242ecbd212d..b521e4240fc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.DataverseSession; import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.EMailValidator; +import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; @@ -1024,6 +1025,31 @@ public Response validatePassword(String password) { public Response isOrcidEnabled() { return authSvc.isOrcidEnabled() ? ok("Orcid is enabled") : ok("no orcid for you."); } + + @GET + @Path("{id}/reregisterHDLToPID") + public Response reregisterHdlToPID(@PathParam("id") String id) { + logger.info("Starting to reregister " + id + " Dataset Id. " + new Date()); + + try { + User u = findUserOrDie(); + DataverseRequest r = createDataverseRequest(u); + Dataset ds = findDatasetOrDie(id); + if (ds.getIdentifier() != null && !ds.getIdentifier().isEmpty() && ds.getProtocol().equals(GlobalId.HDL_PROTOCOL)) { + execCommand(new RegisterDvObjectCommand(r, ds, true)); + } else { + return ok("Dataset was not registered as a HDL. "); + } + + } catch (WrappedResponse r) { + logger.info("Failed to migrate Dataset Handle id: " + id); + return badRequest("Failed to migrate Dataset Handle id: " + id); + } catch (Exception e) { + logger.info("Failed to migrate Dataset Handle id: " + id + " Unexpecgted Exception " + e.getMessage()); + return badRequest("Failed to migrate Dataset Handle id: " + id + " Unexpecgted Exception " + e.getMessage()); + } + return ok("Dataset migrate HDL registration complete. 
Dataset re-registered successfully."); + } @GET @Path("{id}/registerDataFile") diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java index 457004d71ed..a4c0ea720f9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java @@ -1,8 +1,10 @@ package edu.harvard.iq.dataverse.engine.command.impl; +import edu.harvard.iq.dataverse.AlternativePersistentIdentifier; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -21,14 +23,28 @@ public class RegisterDvObjectCommand extends AbstractVoidCommand { private final DvObject target; + private final Boolean migrateHandle; public RegisterDvObjectCommand(DataverseRequest aRequest, DvObject target) { super(aRequest, target); this.target = target; + this.migrateHandle = false; + } + + public RegisterDvObjectCommand(DataverseRequest aRequest, DvObject target, Boolean migrateHandle) { + super(aRequest, target); + this.target = target; + this.migrateHandle = migrateHandle; } @Override protected void executeImpl(CommandContext ctxt) throws CommandException { + + if(this.migrateHandle){ + //Only continue if you can successfully migrate the handle + if (!processMigrateHandle(ctxt)) return; + } + System.out.print("past migration"); String nonNullDefaultIfKeyNotFound = ""; String protocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound); String authority = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound); @@ -51,7 +67,6 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { target.setAuthority(authority); } } - if (idServiceBean.alreadyExists(target)) { return; } @@ -78,10 +93,10 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { for (DataFile df : dataset.getFiles()) { if (df.getIdentifier() == null || df.getIdentifier().isEmpty()) { df.setIdentifier(ctxt.files().generateDataFileIdentifier(df, idServiceBean)); - if (df.getProtocol() == null) { + if (df.getProtocol() == null || df.getProtocol().isEmpty()) { df.setProtocol(protocol); } - if (df.getAuthority() == null) { + if (df.getAuthority() == null || df.getAuthority().isEmpty()) { df.setAuthority(authority); } } @@ -115,6 +130,34 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { } catch (Throwable ex) { //do nothing - we'll know it failed because the global id create time won't have been updated. 
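The reregisterHDLToPID endpoint added to Admin.java in PATCH 020 above re-registers a handle-based dataset under the currently configured PID provider. Once deployed it can presumably be exercised with curl along these lines; the /api/admin prefix, the numeric database id, and the API token header are assumptions based on how other admin endpoints are usually called, not something this patch documents:

    # Sketch: trigger HDL-to-PID re-registration for one dataset.
    # Run from localhost (or unblock the admin API); export API_TOKEN first if your setup requires one.
    SERVER_URL=http://localhost:8080
    DATASET_ID=24
    curl -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/admin/$DATASET_ID/reregisterHDLToPID"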
} + if(this.migrateHandle){ + //Only continue if you can successfully migrate the handle + if (!processMigrateHandle(ctxt)) return; + boolean doNormalSolrDocCleanUp = true; + ctxt.index().indexDataset((Dataset) target, doNormalSolrDocCleanUp); + ctxt.solrIndex().indexPermissionsForOneDvObject((Dataset)target); + } + } + + private Boolean processMigrateHandle (CommandContext ctxt){ + boolean retval = true; + if(!target.isInstanceofDataset()) return false; + if(!target.getProtocol().equals(GlobalId.HDL_PROTOCOL)) return false; + + AlternativePersistentIdentifier api = new AlternativePersistentIdentifier(); + api.setProtocol(target.getProtocol()); + api.setAuthority(target.getAuthority()); + api.setIdentifier(target.getIdentifier()); + api.setDvObject(target); + api.setIdentifierRegistered(target.isIdentifierRegistered()); + api.setGlobalIdCreateTime(target.getGlobalIdCreateTime()); + ctxt.em().persist(api); + target.setProtocol(null); + target.setAuthority(null); + target.setIdentifier(null); + target.setIdentifierRegistered(false); + target.setGlobalIdCreateTime(null); + return retval; } } From 260167aa7fd3f8a5013aa062c9dc274d156bc317 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 12 Sep 2018 10:41:16 -0400 Subject: [PATCH 021/114] #4832 getFiles via alternative Identifier --- .../dataverse/AlternativePersistentIdentifier.java | 12 +++++++++++- src/main/java/edu/harvard/iq/dataverse/Dataset.java | 9 +++++++++ src/main/java/edu/harvard/iq/dataverse/DvObject.java | 9 +++++++++ .../harvard/iq/dataverse/DvObjectServiceBean.java | 1 - .../engine/command/impl/RegisterDvObjectCommand.java | 1 + 5 files changed, 30 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java b/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java index f1c2b173b7e..189c243ceb1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java +++ b/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java @@ -41,7 +41,9 @@ public class AlternativePersistentIdentifier implements Serializable { private boolean identifierRegistered; - public Long getId() { + private boolean localFileDirectory; + + public Long getId() { return id; } @@ -97,4 +99,12 @@ public void setIdentifierRegistered(boolean identifierRegistered) { this.identifierRegistered = identifierRegistered; } + public boolean isLocalFileDirectory() { + return localFileDirectory; + } + + public void setLocalFileDirectory(boolean localFileDirectory) { + this.localFileDirectory = localFileDirectory; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 452ae7e3edf..22fba1eae00 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -485,6 +485,15 @@ public Path getFileSystemDirectory() { if (filesRootDirectory == null || filesRootDirectory.equals("")) { filesRootDirectory = "/tmp/files"; } + + if (this.getAlternativePersistentIndentifiers() != null && !this.getAlternativePersistentIndentifiers().isEmpty()) { + for (AlternativePersistentIdentifier api : this.getAlternativePersistentIndentifiers()) { + if (api.isLocalFileDirectory()) { + studyDir = Paths.get(filesRootDirectory, api.getAuthority(), api.getIdentifier()); + return studyDir; + } + } + } if (this.getAuthority() != null && this.getIdentifier() != null) { studyDir = Paths.get(filesRootDirectory, this.getAuthority(), 
this.getIdentifier()); diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index ef7b64a3539..9c091e2d34c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -136,6 +136,15 @@ public String visit(DataFile df) { @OneToMany(mappedBy = "dvObject", cascade = CascadeType.ALL, orphanRemoval = true) private Set alternativePersistentIndentifiers; + + public Set getAlternativePersistentIndentifiers() { + return alternativePersistentIndentifiers; + } + + public void setAlternativePersistentIndentifiers(Set alternativePersistentIndentifiers) { + this.alternativePersistentIndentifiers = alternativePersistentIndentifiers; + } + /** * previewImageAvailable could also be thought of as "thumbnail has been diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java index f4422e1e474..7fbca815f27 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java @@ -83,7 +83,6 @@ public DvObject findByGlobalId(String globalIdString, String typeString, Boolean query.setParameter("protocol", gid.getProtocol()); query.setParameter("authority", gid.getAuthority()); query.setParameter("dtype", typeString); - System.out.print(query.toString()); foundDvObject = (DvObject) query.getSingleResult(); } catch (javax.persistence.NoResultException e) { // (set to .info, this can fill the log file with thousands of diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java index a4c0ea720f9..f760785ab07 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java @@ -151,6 +151,7 @@ private Boolean processMigrateHandle (CommandContext ctxt){ api.setDvObject(target); api.setIdentifierRegistered(target.isIdentifierRegistered()); api.setGlobalIdCreateTime(target.getGlobalIdCreateTime()); + api.setLocalFileDirectory(true); ctxt.em().persist(api); target.setProtocol(null); target.setAuthority(null); From 397754dae8fe9a072c1be90180890cb277e9edfd Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 13 Sep 2018 10:12:30 -0400 Subject: [PATCH 022/114] #4832 Display Alternative PID on Dataset Page --- src/main/java/Bundle.properties | 2 ++ src/main/java/edu/harvard/iq/dataverse/Dataset.java | 13 +++++++++++++ src/main/webapp/dataset.xhtml | 3 ++- src/main/webapp/metadataFragment.xhtml | 12 ++++++++++++ 4 files changed, 29 insertions(+), 1 deletion(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 9f13bf4fdb7..57816a4d9a0 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1276,6 +1276,8 @@ dataset.metadata.publicationYear=Publication Year dataset.metadata.publicationYear.tip=The publication year of a dataset. dataset.metadata.persistentId=Dataset Persistent ID dataset.metadata.persistentId.tip=The unique persistent identifier for a dataset, which can be a Handle or DOI in Dataverse. 
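In PATCH 021 above the migrated handle is kept as an AlternativePersistentIdentifier row with localFileDirectory = true, so the dataset's files stay under the old handle-based directory even after the dataset gets a new PID. A rough way to confirm a migration at the database level -- the table and column names below follow JPA default naming for the new entity, and the role/database are only the usual installer defaults, so treat all of them as assumptions -- might be:

    # Sketch: list migrated handles and their local-file-directory flag.
    # Adjust the database name/role; table and column names are assumed JPA defaults.
    psql -h localhost -U dvnapp -d dvndb -c \
      "SELECT protocol, authority, identifier, localfiledirectory, dvobject_id
         FROM alternativepersistentidentifier ORDER BY dvobject_id;"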
+dataset.metadata.alternativePersistentId=Dataset Alternative Persistent ID +dataset.metadata.alternativePersistentId.tip=An alternative persistent identifier for a dataset, which can be a Handle or DOI in Dataverse. file.metadata.persistentId=File Persistent ID file.metadata.persistentId.tip=The unique persistent identifier for a file, which can be a Handle or DOI in Dataverse. dataset.versionDifferences.termsOfUseAccess=Terms of Use and Access diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 22fba1eae00..ccff2dc0522 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -501,6 +501,19 @@ public Path getFileSystemDirectory() { return studyDir; } + + public String getAlternativePersistentIdentifier(){ + String retVal = null; + if (this.getAlternativePersistentIndentifiers() != null && !this.getAlternativePersistentIndentifiers().isEmpty()) { + for (AlternativePersistentIdentifier api : this.getAlternativePersistentIndentifiers()) { + retVal = retVal != null ? retVal + "; " : ""; + retVal += api.getProtocol() + ":"; + retVal += api.getAuthority() + "/"; + retVal += api.getIdentifier(); + } + } + return retVal; + } public String getNextMajorVersionString() { // Never need to get the next major version for harvested studies. diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 96844ed5119..1de2b19c134 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -632,7 +632,8 @@ - + + diff --git a/src/main/webapp/metadataFragment.xhtml b/src/main/webapp/metadataFragment.xhtml index 524f6fdf6c5..a9f1a157c1c 100755 --- a/src/main/webapp/metadataFragment.xhtml +++ b/src/main/webapp/metadataFragment.xhtml @@ -28,6 +28,18 @@ #{globalId}
[The XHTML hunks that followed here lost their markup in extraction; only a few expression-language fragments survive. They indicate (1) the metadataFragment.xhtml block that renders the new alternative persistent identifier, #{altPID}, alongside #{globalId}, and (2) the dataset.xhtml linking popup, including the #{bundle['dataverse.link.dataset.choose']} prompt and a save button declared with value="#{bundle['dataset.link.save']}" action="#{DatasetPage.saveLinkingDataverses()}".]
From 40d958aeeda093a328cff9ac8690da2c87737d27 Mon Sep 17 00:00:00 2001 From: Pete Meyer Date: Thu, 20 Sep 2018 11:14:49 -0400 Subject: [PATCH 055/114] cleanup --- conf/docker-aio/setupIT.bash | 4 ---- .../java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java | 3 ++- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/conf/docker-aio/setupIT.bash b/conf/docker-aio/setupIT.bash index c02e7aa1765..528b8f3c5f8 100755 --- a/conf/docker-aio/setupIT.bash +++ b/conf/docker-aio/setupIT.bash @@ -11,7 +11,3 @@ cd /opt/dv/testdata /usr/local/glassfish4/glassfish/bin/asadmin deploy /opt/dv/dvinstall/dataverse.war ./post # modified from phoenix -# necessary for HarvestingServerIT as of dd4ba227c50507989ed011de7b1ef69432a6a96c -# switched this to HarvestingServerIT; clean this up when this test switches it back when done (or we decide that's not necessary) -#curl -X PUT -d 'true' "http://localhost:8080/api/admin/settings/:OAIServerEnabled" - diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java index 6f08155cb39..f02f014f601 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java @@ -37,7 +37,8 @@ public static void setUpClass() { @AfterClass public static void afterClass() { - //intentional no-op until there's cleanup to be done + // disable harvesting server (default value) + Response enableHarvestingServerResponse = UtilIT.setSetting(SettingsServiceBean.Key.OAIServerEnabled,"false"); } private void setupUsers() { From 6ad831a84c8dbf845f78d80ae044a7a0f03d665f Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 20 Sep 2018 11:39:49 -0400 Subject: [PATCH 056/114] #4410 force query input to lower case to match NamedQuery --- .../java/edu/harvard/iq/dataverse/DataverseServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 23f677fcd44..78f57603779 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -452,7 +452,7 @@ public List filterDataversesForLinking(String query, DataverseRequest List dataverseList = new ArrayList<>(); List results = em.createNamedQuery("Dataverse.filterByName", Dataverse.class) - .setParameter("name", "%" + query + "%") + .setParameter("name", "%" + query.toLowerCase() + "%") .getResultList(); List alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM datasetlinkingdataverse WHERE dataset_id = " + dataset.getId()).getResultList(); From a195d3b99d8fdf958d061c325174b1abf7406a97 Mon Sep 17 00:00:00 2001 From: Michael Heppler Date: Thu, 20 Sep 2018 13:25:08 -0400 Subject: [PATCH 057/114] Capitalized file types displayed in facets from MimeTypeFacets.propertied, added comment to FileUtil.java outlining suggested captilization changes to the getFacetFileType function [ref #5067] --- src/main/java/MimeTypeFacets.properties | 92 +++++++++---------- .../harvard/iq/dataverse/util/FileUtil.java | 6 ++ 2 files changed, 52 insertions(+), 46 deletions(-) diff --git a/src/main/java/MimeTypeFacets.properties b/src/main/java/MimeTypeFacets.properties index f51987958d5..4e8b3b0f275 100644 --- a/src/main/java/MimeTypeFacets.properties +++ b/src/main/java/MimeTypeFacets.properties @@ -3,60 +3,60 @@ # For 
example, all image formats will be grouped under "image", etc. # # Documentation: -application/pdf=document -application/msword=document -application/vnd.ms-excel=document -application/vnd.openxmlformats-officedocument.spreadsheetml.sheet=document -application/vnd.openxmlformats-officedocument.wordprocessingml.document=document +application/pdf=Document +application/msword=Document +application/vnd.ms-excel=Document +application/vnd.openxmlformats-officedocument.spreadsheetml.sheet=Document +application/vnd.openxmlformats-officedocument.wordprocessingml.document=Document # Text: -text/plain=text -text/xml=text +text/plain=Text +text/xml=Text # Ingested text/tab-separated-values=Tabular Data # Data files: -text/tsv=data -text/csv=data -text/x-fixed-field=data -application/x-rlang-transport=data -type/x-r-syntax=data -application/x-R-2=data -application/x-stata=data -application/x-stata-6=data -application/x-stata-13=data -application/x-stata-14=data -application/x-stata-15=data -text/x-stata-syntax=data -application/x-spss-por=data -application/x-spss-sav=data -text/x-spss-syntax=data -application/x-sas-transport=data -application/x-sas-system=data -text/x-sas-syntax=data -application/x-dvn-csvspss-zip=data -application/x-dvn-tabddi-zip=data -application/fits=fits -application/zipped-shapefile=shape +text/tsv=Data +text/csv=Data +text/x-fixed-field=Data +application/x-rlang-transport=Data +type/x-r-syntax=Data +application/x-R-2=Data +application/x-stata=Data +application/x-stata-6=Data +application/x-stata-13=Data +application/x-stata-14=Data +application/x-stata-15=Data +text/x-stata-syntax=Data +application/x-spss-por=Data +application/x-spss-sav=Data +text/x-spss-syntax=Data +application/x-sas-transport=Data +application/x-sas-system=Data +text/x-sas-syntax=Data +application/x-dvn-csvspss-zip=Data +application/x-dvn-tabddi-zip=Data +application/fits=FITS +application/zipped-shapefile=Shape # Archive files: -application/zip=zip +application/zip=ZIP # Images files # (should be safe to just split the mime type on "/" in "image/*" though...) -image/gif=image -image/jpeg=image -image/x-portable-bitmap=image -image/x-portable-graymap=image -image/png=image -image/x-portable-anymap=image -image/x-portable-pixmap=image -image/cmu-raster=image -image/x-rgb=image -image/tiff=image -image/x-xbitmap=image -image/x-xpixmap=image -image/x-xwindowdump=image +image/gif=Image +image/jpeg=Image +image/x-portable-bitmap=Image +image/x-portable-graymap=Image +image/png=Image +image/x-portable-anymap=Image +image/x-portable-pixmap=Image +image/cmu-raster=Image +image/x-rgb=Image +image/tiff=Image +image/x-xbitmap=Image +image/x-xpixmap=Image +image/x-xwindowdump=Image # Network Data files -text/xml-graphml=networkdata +text/xml-graphml=Network Data # Other -application/octet-stream=unknown +application/octet-stream=Unknown # Dataverse-specific -application/vnd.dataverse.file-package=data \ No newline at end of file +application/vnd.dataverse.file-package=Data \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index c3809640d24..dc3ba0adf89 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -233,6 +233,12 @@ public static String getFacetFileType(DataFile dataFile) { // but it is probably still better than to tag them all as // "uknown". // -- L.A. 
4.0 alpha 1 + // + // UPDATE, MH 4.9.2 + // Since production is displaying both "tabulardata" and "Tabular Data" + // we are going to try to add capitalization here to this function + // in order to capitalize all the unknown types that are not called + // out in MimeTypeFacets.properties return fileType.split("/")[0]; } } From 6a4c0082ab3005029fe2ffe0b8823cf25b32c198 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 20 Sep 2018 13:39:35 -0400 Subject: [PATCH 058/114] Added capitalization for the default facet file types. (#5067) --- src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index dc3ba0adf89..b0a72b75e3c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -218,7 +218,7 @@ public static String getUserFriendlyFileType(DataFile dataFile) { public static String getFacetFileType(DataFile dataFile) { String fileType = dataFile.getContentType(); - if (fileType != null) { + if (!StringUtil.isEmpty(fileType)) { if (fileType.contains(";")) { fileType = fileType.substring(0, fileType.indexOf(";")); } @@ -239,11 +239,12 @@ public static String getFacetFileType(DataFile dataFile) { // we are going to try to add capitalization here to this function // in order to capitalize all the unknown types that are not called // out in MimeTypeFacets.properties - return fileType.split("/")[0]; + String typeClass = fileType.split("/")[0]; + return Character.toUpperCase(typeClass.charAt(0)) + typeClass.substring(1); } } - return "unknown"; + return "Unknown"; } public static String getUserFriendlyOriginalType(DataFile dataFile) { From d6bc6c8f35f09326bd6352a965a2d65cda8ce37b Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 20 Sep 2018 14:32:50 -0400 Subject: [PATCH 059/114] removed the extra scripts from the installer makefile (#4966) --- scripts/installer/Makefile | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/scripts/installer/Makefile b/scripts/installer/Makefile index 991b642960d..9c265bb8a1d 100644 --- a/scripts/installer/Makefile +++ b/scripts/installer/Makefile @@ -9,17 +9,15 @@ JHOVE_SCHEMA=${INSTALLER_ZIP_DIR}/jhoveConfig.xsd SOLR_SCHEMA=${INSTALLER_ZIP_DIR}/schema.xml SOLR_CONFIG=${INSTALLER_ZIP_DIR}/solrconfig.xml INSTALL_SCRIPT=${INSTALLER_ZIP_DIR}/install -GLASSFISH_STARTUP_SCRIPT=${INSTALLER_ZIP_DIR}/glassfish-startup -POSTGRES_SCRIPT=${INSTALLER_ZIP_DIR}/postgres-setup installer: dvinstall.zip clean: /bin/rm -rf ${INSTALLER_ZIP_DIR} dvinstall.zip -dvinstall.zip: ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${DB_SCRIPT} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${SOLR_CONFIG} ${INSTALL_SCRIPT} ${GLASSFISH_STARTUP_SCRIPT} ${POSTGRES_SCRIPT} +dvinstall.zip: ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${DB_SCRIPT} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${SOLR_CONFIG} ${INSTALL_SCRIPT} @echo making installer... 
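The facet labels introduced in PATCH 057/058 above are baked into the search index when a file is indexed, so records indexed before this change will presumably keep showing the old lower-case values (the "tabulardata" vs. "Tabular Data" mix mentioned in the commit message) until they are reindexed. One way to see what the index is actually serving -- the core name and facet field below reflect a stock development setup and are assumptions, not something these patches configure -- is to facet directly against Solr:

    # Sketch: list the distinct File Type facet values currently in the Solr index.
    # Assumes the default core (collection1) and the fileTypeGroupFacet field.
    curl "http://localhost:8983/solr/collection1/select?q=*:*&rows=0&facet=true&facet.field=fileTypeGroupFacet&wt=json"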
- zip -r dvinstall.zip ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${DB_SCRIPT} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${SOLR_CONFIG} ${INSTALL_SCRIPT} ${GLASSFISH_STARTUP_SCRIPT} ${POSTGRES_SCRIPT} + zip -r dvinstall.zip ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${DB_SCRIPT} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${SOLR_CONFIG} ${INSTALL_SCRIPT} @echo @echo "Done!" @@ -47,16 +45,6 @@ ${GLASSFISH_SETUP_SCRIPT}: glassfish-setup.sh mkdir -p ${INSTALLER_ZIP_DIR} /bin/cp glassfish-setup.sh ${INSTALLER_ZIP_DIR} -${GLASSFISH_STARTUP_SCRIPT}: glassfish-startup - @echo copying glassfish startup - mkdir -p ${INSTALLER_ZIP_DIR} - /bin/cp glassfish-startup ${INSTALLER_ZIP_DIR} - -${POSTGRES_SCRIPT}: postgres-setup - @echo copying postgres-setup - mkdir -p ${INSTALLER_ZIP_DIR} - /bin/cp postgres-setup ${INSTALLER_ZIP_DIR} - ${POSTGRES_DRIVERS}: pgdriver/postgresql-42.2.2.jar @echo copying postgres driver @mkdir -p ${POSTGRES_DRIVERS} From 9dbf2be356173ebf77b3369fc5a99e7085b8dc52 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 20 Sep 2018 15:09:11 -0400 Subject: [PATCH 060/114] put port 8080 in clickable link to avoid browser warnings #4990 --- scripts/installer/ec2-create-instance.sh | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/installer/ec2-create-instance.sh b/scripts/installer/ec2-create-instance.sh index f2d5556b682..1f5931c8c92 100755 --- a/scripts/installer/ec2-create-instance.sh +++ b/scripts/installer/ec2-create-instance.sh @@ -80,8 +80,12 @@ sed -i "s/branch:/branch: $BRANCH_NAME/" dataverse/defaults/main.yml ansible-playbook -i dataverse/inventory dataverse/dataverse.pb --connection=local EOF +#Port 8080 has been added because Ansible puts a redirect in place +#from HTTP to HTTPS and the cert is invalid (self-signed), forcing +#the user to click through browser warnings. +CLICKABLE_LINK="http://${PUBLIC_DNS}:8080" echo "To ssh into the new instance:" echo "ssh -i $PEM_FILE $USER_AT_HOST" -echo "Branch \"$BRANCH_NAME\" has been deployed to http://${PUBLIC_DNS}" +echo "Branch \"$BRANCH_NAME\" has been deployed to $CLICKABLE_LINK" echo "When you are done, please terminate your instance with:" echo "aws ec2 terminate-instances --instance-ids $INSTANCE_ID" From d438eba85e4e0f6081a4ed6bf0e5907d0c4173f4 Mon Sep 17 00:00:00 2001 From: matthew-a-dunlap Date: Thu, 20 Sep 2018 15:29:04 -0400 Subject: [PATCH 061/114] Added svg to footer with png fallback #5009 --- src/main/webapp/dataverse_footer.xhtml | 2 +- .../images/dataverse_project_logo.svg | 115 ++++++++++++++++++ 2 files changed, 116 insertions(+), 1 deletion(-) create mode 100644 src/main/webapp/resources/images/dataverse_project_logo.svg diff --git a/src/main/webapp/dataverse_footer.xhtml b/src/main/webapp/dataverse_footer.xhtml index b39d3717756..ab47f3ea783 100644 --- a/src/main/webapp/dataverse_footer.xhtml +++ b/src/main/webapp/dataverse_footer.xhtml @@ -35,7 +35,7 @@
diff --git a/src/main/webapp/resources/images/dataverse_project_logo.svg b/src/main/webapp/resources/images/dataverse_project_logo.svg new file mode 100644 index 00000000000..9e56ba4ce09 --- /dev/null +++ b/src/main/webapp/resources/images/dataverse_project_logo.svg @@ -0,0 +1,115 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From d1f9f41418275ac63facba8b6f217dcfac8aaae5 Mon Sep 17 00:00:00 2001 From: matthew-a-dunlap Date: Thu, 20 Sep 2018 15:31:02 -0400 Subject: [PATCH 062/114] Added note about prov test using png #5009 --- src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java index 17bee9b52ac..52143eb9981 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java @@ -168,6 +168,7 @@ public void testAddProvFile() { assertEquals(200, publishDataset.getStatusCode()); //We want to publish a 2nd version to confirm metadata is being passed over between version + //Note: this UI file is just being used as an arbitrary file for upload testing String pathToFile2 = "src/main/webapp/resources/images/dataverseproject_logo.png"; Response authorAddsFile2 = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile2, apiTokenForDepositor); authorAddsFile2.prettyPrint(); From 0f416087cd1b3b0614066eb1468b8780e9688e67 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 20 Sep 2018 16:08:33 -0400 Subject: [PATCH 063/114] removed the second, broken version of create_pg_hash. --- scripts/installer/install | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/scripts/installer/install b/scripts/installer/install index f31b76c9fc3..f3ee34ac92f 100755 --- a/scripts/installer/install +++ b/scripts/installer/install @@ -1518,26 +1518,3 @@ sub import_reference_data { } } -sub create_pg_hash { - my $pg_username = shift @_; - my $pg_password = shift @_; - - my $encode_line = $pg_password . 
$pg_username; - - # for Redhat: - - ##print STDERR "executing /bin/echo -n $encode_line | md5sum\n"; - - my $hash; - $hash = `/bin/echo -n $encode_line | md5sum`; - - chop $hash; - - $hash =~ s/ \-$//; - - if ( ( length($hash) != 32 ) || ( $hash !~ /^[0-9a-f]*$/ ) ) { - print STDERR "Failed to generate a MD5-encrypted password hash for the Postgres database.\n"; - exit 1; - } - return $hash; -} From be995cc8fc4db2ead02af4ac3439dca5e609f83c Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 20 Sep 2018 16:37:47 -0400 Subject: [PATCH 064/114] replace sed command with --extra-vars arg #4990 --- scripts/installer/ec2-create-instance.sh | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/scripts/installer/ec2-create-instance.sh b/scripts/installer/ec2-create-instance.sh index 1f5931c8c92..d525636e1c7 100755 --- a/scripts/installer/ec2-create-instance.sh +++ b/scripts/installer/ec2-create-instance.sh @@ -75,9 +75,14 @@ echo "Please wait at least 15 minutes while the branch \"$BRANCH_NAME\" is being ssh -T -i $PEM_FILE -o 'StrictHostKeyChecking no' -o 'UserKnownHostsFile=/dev/null' -o 'ConnectTimeout=300' $USER_AT_HOST < Date: Thu, 20 Sep 2018 16:44:58 -0400 Subject: [PATCH 065/114] #4410 prevent linking to unpublished ds via api --- src/main/java/Bundle.properties | 1 + .../iq/dataverse/engine/command/impl/LinkDatasetCommand.java | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 256c740d96c..0a4457b403f 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -686,6 +686,7 @@ dataverse.link.save=Save Linked Dataverse dataset.link.save=Save Linked Dataset dataset.link.not.to.owner=Can't link a dataset to its dataverse dataset.link.not.to.parent.dataverse=Can't link a dataset to its parent dataverses +dataset.link.not.published=Can't link a dataset that has not been published dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to. dataverse.link.dataset.choose=Enter the name of the dataverse you would like to link this dataset to. If you need to remove this link in the future, please contact {0}. dataverse.link.dataset.none=No linkable dataverses available. 
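The new dataset.link.not.published message pairs with the isReleased() guard added to LinkDatasetCommand in the next hunk, so draft datasets are now rejected by the API as well as the page. For reference, dataset linking is normally driven through a call along these lines; the endpoint shape and the superuser requirement are assumptions based on the existing linking API rather than anything introduced here:

    # Sketch: link an already-published dataset into another dataverse.
    # A never-published dataset should now fail with "Can't link a dataset that has not been published".
    SERVER_URL=http://localhost:8080
    DATASET_ID=24
    DATAVERSE_ALIAS=linkingDV
    curl -H "X-Dataverse-key: $API_TOKEN" -X PUT \
      "$SERVER_URL/api/datasets/$DATASET_ID/link/$DATAVERSE_ALIAS"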
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java index e3cfb1ad421..8e4f6370414 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java @@ -38,13 +38,16 @@ public LinkDatasetCommand(DataverseRequest aRequest, Dataverse dataverse, Datase @Override public DatasetLinkingDataverse execute(CommandContext ctxt) throws CommandException { + if (!linkedDataset.isReleased()) { + throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.link.not.published"), this); + } if (linkedDataset.getOwner().equals(linkingDataverse)) { throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.link.not.to.owner"), this); } if (linkedDataset.getOwner().getOwners().contains(linkingDataverse)) { throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.link.not.to.parent.dataverse"), this); } - + DatasetLinkingDataverse datasetLinkingDataverse = new DatasetLinkingDataverse(); datasetLinkingDataverse.setDataset(linkedDataset); datasetLinkingDataverse.setLinkingDataverse(linkingDataverse); From c90f7629934e5f5a95b51b66c7b7650b8ec9ef09 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Thu, 20 Sep 2018 19:28:27 -0400 Subject: [PATCH 066/114] more fixes/error checks for the installer (port number!) --- scripts/installer/install | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/scripts/installer/install b/scripts/installer/install index f3ee34ac92f..40039a742ae 100755 --- a/scripts/installer/install +++ b/scripts/installer/install @@ -1294,15 +1294,15 @@ sub setup_postgres { # 3d. CHECK IF WE CAN TALK TO POSTGRES AS THE ADMIN: - if ($psql_exec_path eq "" || system( $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) + if ($psql_exec_path eq "" || system( $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) { # No, we can't. :( if ($pg_local_connection || $noninteractive) { # If Postgres is running locally, this is a fatal condition. # We'll give them some (potentially) helpful pointers and exit. - print $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . "\n"; + print "(Tried executing: " . $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1) \n"; print "Nope, I haven't been able to connect to the local instance of PostgresQL as the admin user.\n"; print "\nIs postgresql running? \n"; print " On a RedHat-like system, you can check the status of the daemon with\n\n"; @@ -1327,7 +1327,7 @@ sub setup_postgres { # without opening remote access for the admin user. They will simply # have to run this script in the "postgres-only" mode on that server, locally, # then resume the installation here: - print ($psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1 \n" ); + print "(Tried executing: " . $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . 
" -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1)\n\n"; print "Haven't been able to connect to the remote Postgres server as the admin user.\n"; print "(Or you simply don't have psql installed on this server)\n"; print "It IS possible to configure a database for your Dataverse on a remote server,\n"; @@ -1356,7 +1356,7 @@ sub setup_postgres { # 4c. CHECK IF THIS DB ALREADY EXISTS: my $psql_command_dbcheck = - $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U " . $POSTGRES_ADMIN_USER . " -c '' -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ">/dev/null 2>&1"; + $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -c '' -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ">/dev/null 2>&1"; if ( ( my $exitcode = system($psql_command_dbcheck) ) == 0 ) { @@ -1419,7 +1419,7 @@ sub setup_postgres { print TMPCMD $sql_command; close TMPCMD; - my $psql_commandline = $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -f /tmp/pgcmd.$$.tmp >/dev/null 2>&1"; + my $psql_commandline = $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -f /tmp/pgcmd.$$.tmp >/dev/null 2>&1"; my $out = qx($psql_commandline 2>&1); $exitcode = $?; @@ -1442,7 +1442,7 @@ sub setup_postgres { my $psql_command = $psql_exec_path - . "/createdb -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U $CONFIG_DEFAULTS{'POSTGRES_USER'} " + . "/createdb -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U $CONFIG_DEFAULTS{'POSTGRES_USER'} " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " --owner=" . $CONFIG_DEFAULTS{'POSTGRES_USER'}; @@ -1470,7 +1470,7 @@ sub setup_postgres { # verify that we can talk to that database, with the credentials of the database # user that we want the Dataverse application to be using: - if ( $psql_exec_path ne "" && system( $psql_exec_path . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) + if ( $psql_exec_path ne "" && system( $psql_exec_path . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) { print STDERR "Oops, haven't been able to connect to the database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ",\n"; print STDERR "running on " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . ", as user " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . ".\n\n"; @@ -1485,8 +1485,11 @@ sub import_reference_data { # (we have already verified that the referenceData.sql file exists) my $psql_command = $psql_exec_path . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} - . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " -d " - . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " -f" . $REFERENCE_DATA_SQL; + . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} + . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} + . " -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " -f " . 
$REFERENCE_DATA_SQL; + + print "EXECUTING PSQL COMMAND: $psql_command\n"; unless ( ( my $exitcode = system("$psql_command") ) == 0 ) { print "WARNING: Could not pre-populate Postgres database for the Dataverse application!\n"; From 5c053a62e680e6fb61ca2777bc46283e949c2cba Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 20 Sep 2018 20:13:57 -0400 Subject: [PATCH 067/114] support non-IQSS repos #4990 --- .../source/developers/deployment.rst | 8 +++- scripts/installer/ec2-create-instance.sh | 47 ++++++++++++++----- 2 files changed, 41 insertions(+), 14 deletions(-) diff --git a/doc/sphinx-guides/source/developers/deployment.rst b/doc/sphinx-guides/source/developers/deployment.rst index 3142b3f8312..c1321a40330 100755 --- a/doc/sphinx-guides/source/developers/deployment.rst +++ b/doc/sphinx-guides/source/developers/deployment.rst @@ -52,9 +52,13 @@ Once you have done the configuration above, you are ready to try running the "cr Download :download:`ec2-create-instance.sh <../../../../scripts/installer/ec2-create-instance.sh>` and put it somewhere reasonable. For the purpose of these instructions we'll assume it's in the "Downloads" directory in your home directory. -Select a branch you'd like to deploy from https://github.com/IQSS/dataverse/branches/all . (In the future, we would like to support forks of Dataverse, so that the branch doesn't have to be under the "IQSS" GitHub organization.) In the example below we will use the "develop" branch. +You need to decide which branch you'd like to deploy to AWS. Select a branch from https://github.com/IQSS/dataverse/branches/all such as "develop" and pass it to the script with ``-b`` as in the following example. (Branches such as "master" and "develop" are described in the :doc:`version-control` section.) -``bash ~/Downloads/ec2-create-instance.sh develop`` +``bash ~/Downloads/ec2-create-instance.sh -b develop`` + +You must specify the branch with ``-b`` but you can also specify a non-IQSS git repo URL with ``-r`` as in the following example. + +``bash ~/Downloads/ec2-create-instance.sh -b develop -r https://github.com/scholarsportal/dataverse.git`` Now you will need to wait at least 5 or 10 minutes until the deployment is finished. Eventually, the output should tell you how to access the installation of Dataverse in a web browser or via ssh. It will also provide instructions on how to delete the instance when you are finished with it. Please be aware that AWS charges per minute for a running instance. You can also delete your instance from https://console.aws.amazon.com/console/home?region=us-east-1 . diff --git a/scripts/installer/ec2-create-instance.sh b/scripts/installer/ec2-create-instance.sh index d525636e1c7..2533e9f3c37 100755 --- a/scripts/installer/ec2-create-instance.sh +++ b/scripts/installer/ec2-create-instance.sh @@ -1,7 +1,29 @@ #!/bin/bash -x #Initially Referred to this doc: https://docs.aws.amazon.com/cli/latest/userguide/tutorial-ec2-ubuntu.html -#TODO: allow arbitrary repo, not just IQSS. Will require changing it on the ansible side as well +SUGGESTED_REPO_URL='https://github.com/IQSS/dataverse.git' +SUGGESTED_BRANCH='develop' + +usage() { + echo "Usage: $0 -r $REPO_URL -b $SUGGESTED_BRANCH" 1>&2 + exit 1 +} + +REPO_URL=$SUGGESTED_REPO_URL + +while getopts ":r:b:" o; do + case "${o}" in + r) + REPO_URL=${OPTARG} + ;; + b) + BRANCH_NAME=${OPTARG} + ;; + *) + usage + ;; + esac +done #Make sure "aws" binary is available AWS_CLI_VERSION=$(aws --version) @@ -10,15 +32,16 @@ if [[ "$?" 
-ne 0 ]]; then exit 1 fi -if [ "$1" = "" ]; then - echo "No branch name provided" +if [ "$BRANCH_NAME" = "" ]; then + echo "No branch name provided. You could try adding \"-b $SUGGESTED_BRANCH\" or other branches listed at $SUGGESTED_REPO_URL" + usage + exit 1 +fi + +if [[ $(git ls-remote --heads $REPO_URL $BRANCH_NAME | wc -l) -eq 0 ]]; then + echo "Branch \"$BRANCH_NAME\" does not exist at $REPO_URL" + usage exit 1 -else - BRANCH_NAME=$1 - if [[ $(git ls-remote --heads https://github.com/IQSS/dataverse.git $BRANCH_NAME | wc -l) -eq 0 ]]; then - echo "Branch does not exist on the Dataverse github repo" - exit 1 - fi fi #Create security group if it doesn't already exist @@ -66,7 +89,7 @@ USER_AT_HOST="centos@${PUBLIC_DNS}" echo "New instance created with ID \"$INSTANCE_ID\". To ssh into it:" echo "ssh -i $PEM_FILE $USER_AT_HOST" -echo "Please wait at least 15 minutes while the branch \"$BRANCH_NAME\" is being deployed." +echo "Please wait at least 15 minutes while the branch \"$BRANCH_NAME\" from $REPO_URL is being deployed." #ssh into instance now and run ansible stuff #Note: an attempt was made to pass the branch name in the ansible-playbook call @@ -82,7 +105,7 @@ git checkout extra-vars-travis cd .. # FIXME: The lines to remove are above. export ANSIBLE_ROLES_PATH=. -ansible-playbook -i dataverse/inventory dataverse/dataverse.pb --connection=local --extra-vars "branch=$BRANCH_NAME repo=https://github.com/IQSS/dataverse.git" +ansible-playbook -i dataverse/inventory dataverse/dataverse.pb --connection=local --extra-vars "branch=$BRANCH_NAME repo=$REPO_URL" EOF #Port 8080 has been added because Ansible puts a redirect in place @@ -91,6 +114,6 @@ EOF CLICKABLE_LINK="http://${PUBLIC_DNS}:8080" echo "To ssh into the new instance:" echo "ssh -i $PEM_FILE $USER_AT_HOST" -echo "Branch \"$BRANCH_NAME\" has been deployed to $CLICKABLE_LINK" +echo "Branch \"$BRANCH_NAME\" from $REPO_URL has been deployed to $CLICKABLE_LINK" echo "When you are done, please terminate your instance with:" echo "aws ec2 terminate-instances --instance-ids $INSTANCE_ID" From 9399b789acdefe91cda2e4d2bec8977ef174d8d8 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 21 Sep 2018 14:11:51 -0400 Subject: [PATCH 068/114] #5052 Fix Display of Explore Button --- src/main/webapp/file-download-button-fragment.xhtml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index cd10c13d91e..e24dc2eedd5 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -51,12 +51,12 @@
- + - + From bc92d0e955fbd472e66a0d44ff3dc388b6f05768 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 21 Sep 2018 15:38:05 -0400 Subject: [PATCH 069/114] document quick fix for new devs #5078 --- .../source/developers/dev-environment.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst index fc1d99fc289..bcb79afa380 100755 --- a/doc/sphinx-guides/source/developers/dev-environment.rst +++ b/doc/sphinx-guides/source/developers/dev-environment.rst @@ -144,6 +144,16 @@ To install Solr, execute the following commands: ``bin/solr create_core -c collection1 -d server/solr/collection1/conf`` +Quick Fix for Bundle Problem +---------------------------- + +Please see https://github.com/IQSS/dataverse/issues/5078 for what this is about. We hope to remove this quick fix soon. + +``mkdir /tmp/lang`` + +``cp src/main/java/Bundle.properties /tmp/lang`` + + Run the Dataverse Installer Script ---------------------------------- From 576e87e470551d4aa53acc3291b0e29f56cff744 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 21 Sep 2018 16:12:39 -0400 Subject: [PATCH 070/114] extra checks for the success of copying operations. --- scripts/installer/install | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/scripts/installer/install b/scripts/installer/install index 40039a742ae..cf90cd55eb9 100755 --- a/scripts/installer/install +++ b/scripts/installer/install @@ -483,8 +483,6 @@ for my $ENTRY (@CONFIG_VARIABLES) unless ( -d $g_dir . "/glassfish/domains/domain1" ) { - # TODO: need better check than this - while ( !( -d $g_dir . "/glassfish/domains/domain1" ) ) { print "\nInvalid Glassfish directory " . $g_dir . "!\n"; @@ -509,7 +507,7 @@ for my $ENTRY (@CONFIG_VARIABLES) } - print "OK!\n"; + print "$g_dir looks OK!\n"; $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} = $g_dir; } @@ -869,7 +867,15 @@ sub setup_glassfish { # add a check. print "\nInstalling the Glassfish PostgresQL driver... "; - system( "/bin/cp", "pgdriver/" . $postgres_jdbc, $glassfish_dir . "/glassfish/lib" ); +# system( "/bin/cp", "pgdriver/" . $postgres_jdbc, $glassfish_dir . "/glassfish/lib" ); + my $pgdriver_success = copy("pgdriver/" . $postgres_jdbc, $glassfish_dir . "/glassfish/lib" ); + + unless ($pgdriver_success) + { + print "\n*********************\n"; + print "ERROR! Failed to copy the postgres driver into " . $glassfish_dir . "/glassfish/lib - check the directory permissions!\n"; + exit 1; + } # more diagnostics needed? @@ -972,7 +978,14 @@ sub setup_glassfish { print "\nCopying additional configuration files... "; - system( "/bin/cp -f " . $JHOVE_CONF_SCHEMA_DIST . " " . $glassfish_dir . "/glassfish/domains/domain1/config" ); + #system( "/bin/cp -f " . $JHOVE_CONF_SCHEMA_DIST . " " . $glassfish_dir . "/glassfish/domains/domain1/config" ); + my $jhove_success = copy ($JHOVE_CONF_SCHEMA_DIST, $glassfish_dir . "/glassfish/domains/domain1/config"); + unless ($jhove_success) + { + print "\n*********************\n"; + print "ERROR: failed to copy jhove config file into " . $glassfish_dir . "/glassfish/domains/domain1/config - do you have write permission in that directory?"; + exit 1; + } # The JHOVE conf file has an absolute PATH of the JHOVE config schema file (uh, yeah...) 
# - so it may need to be readjusted here: From 713a8961dbb0be4c0a3e5c9015bc0e737ffeff2f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 21 Sep 2018 16:21:24 -0400 Subject: [PATCH 071/114] switch to non-nested extra vars #4990 The changes necessary at https://github.com/IQSS/dataverse-ansible - cfb8fdb de-nest git variables for extra-vars compatibility - 45f714c bump group_vars for vagrant testing - 7a5ef0f flatten branch and repo variables for extra-vars compatibility --- scripts/installer/ec2-create-instance.sh | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/scripts/installer/ec2-create-instance.sh b/scripts/installer/ec2-create-instance.sh index 2533e9f3c37..cf5039233f2 100755 --- a/scripts/installer/ec2-create-instance.sh +++ b/scripts/installer/ec2-create-instance.sh @@ -91,21 +91,12 @@ echo "ssh -i $PEM_FILE $USER_AT_HOST" echo "Please wait at least 15 minutes while the branch \"$BRANCH_NAME\" from $REPO_URL is being deployed." -#ssh into instance now and run ansible stuff -#Note: an attempt was made to pass the branch name in the ansible-playbook call -# via -e "dataverse.branch=$BRANCH_NAME", but it gets overwritten due to the order -# of operations for where ansible looks for variables. ssh -T -i $PEM_FILE -o 'StrictHostKeyChecking no' -o 'UserKnownHostsFile=/dev/null' -o 'ConnectTimeout=300' $USER_AT_HOST < Date: Fri, 21 Sep 2018 16:32:20 -0400 Subject: [PATCH 072/114] more and better checks for write permissions on the glassfish dir. --- scripts/installer/install | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/scripts/installer/install b/scripts/installer/install index cf90cd55eb9..95be5c14214 100755 --- a/scripts/installer/install +++ b/scripts/installer/install @@ -492,21 +492,26 @@ for my $ENTRY (@CONFIG_VARIABLES) $g_dir = <>; chop $g_dir; } + } - # TODO: - # verify that we can write in the Glassfish directory - # (now that we are no longer requiring to run the installer as root) + # verify that we can write in the Glassfish directory + # (now that we are no longer requiring to run the installer as root) - my $g_testdir = $g_dir . "/glassfish/domains/domain1"; - my $g_libdir = $g_dir . "/glassfish/lib"; - if (!(-w $g_testdir)) { - die("$g_testdir not writable. Have you created a glassfish user, and given it write permission on $g_testdir?\n"); - } elsif (!(-w $g_libdir)) { - die("$g_libdir not writable. Have you created a glassfish user, and given it write permission on $g_libdir?\n"); - } + my @g_testdirs = ( "/glassfish/domains/domain1", + "/glassfish/domains/domain1/config", + "/glassfish/lib"); + for my $test_dir (@g_testdirs) + { + if (!(-w ($g_dir . $test_dir))) + { + print "\n"; + die("ERROR: " . $g_dir . $test_dir . " not writable. Have you created a glassfish user, and given it write permission on the glassfish hierarchy?\n"); + } } + + print "$g_dir looks OK!\n"; $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} = $g_dir; From 41f4df2911fe73620d227b4a7cd1eb0057ba6fba Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 21 Sep 2018 16:43:22 -0400 Subject: [PATCH 073/114] a better error message when the glassfish hierarchy is not writeable. --- scripts/installer/install | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/installer/install b/scripts/installer/install index 95be5c14214..6a0bca21ab8 100755 --- a/scripts/installer/install +++ b/scripts/installer/install @@ -506,7 +506,7 @@ for my $ENTRY (@CONFIG_VARIABLES) if (!(-w ($g_dir . 
$test_dir))) { print "\n"; - die("ERROR: " . $g_dir . $test_dir . " not writable. Have you created a glassfish user, and given it write permission on the glassfish hierarchy?\n"); + die("ERROR: " . $g_dir . $test_dir . " not writable to the user running the installer! Check permissions on the glassfish hierarchy.\n"); } } From 73179984c2f972faed875d944acd917337ed7640 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Fri, 21 Sep 2018 16:45:45 -0400 Subject: [PATCH 074/114] cleanup #4990 --- scripts/installer/ec2-create-instance.sh | 38 +++++++++++++----------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/scripts/installer/ec2-create-instance.sh b/scripts/installer/ec2-create-instance.sh index cf5039233f2..687e271c405 100755 --- a/scripts/installer/ec2-create-instance.sh +++ b/scripts/installer/ec2-create-instance.sh @@ -1,5 +1,6 @@ -#!/bin/bash -x -#Initially Referred to this doc: https://docs.aws.amazon.com/cli/latest/userguide/tutorial-ec2-ubuntu.html +#!/bin/bash + +# For docs, see the "Deployment" page in the Dev Guide. SUGGESTED_REPO_URL='https://github.com/IQSS/dataverse.git' SUGGESTED_BRANCH='develop' @@ -25,7 +26,6 @@ while getopts ":r:b:" o; do esac done -#Make sure "aws" binary is available AWS_CLI_VERSION=$(aws --version) if [[ "$?" -ne 0 ]]; then echo 'The "aws" program could not be executed. Is it in your $PATH?' @@ -44,7 +44,6 @@ if [[ $(git ls-remote --heads $REPO_URL $BRANCH_NAME | wc -l) -eq 0 ]]; then exit 1 fi -#Create security group if it doesn't already exist SECURITY_GROUP='dataverse-sg' GROUP_CHECK=$(aws ec2 describe-security-groups --group-name $SECURITY_GROUP) if [[ "$?" -ne 0 ]]; then @@ -70,17 +69,21 @@ else exit 1 fi -#AMI ID for centos7 acquired by this (very slow) query Sept 10th 2018 -#This does not need to be run every time, leaving it in here so it is remembered -#aws ec2 describe-images --owners 'aws-marketplace' --filters 'Name=product-code,Values=aw0evgkw8e5c1q413zgy5pjce' --query 'sort_by(Images, &CreationDate)[-1].[ImageId]' --output 'text' - -#The AMI ID only works for region us-east-1, for now just forcing that -#Using this image ID a 1-time requires subscription per root account, which was done through the UI -#Also, change the instance size as your own peril. Previous attempts of setting it smaller than medium have caused solr and maven to crash weirdly during install -echo "*Creating ec2 instance" -INSTANCE_ID=$(aws ec2 run-instances --image-id ami-9887c6e7 --security-groups $SECURITY_GROUP --count 1 --instance-type t2.medium --key-name $KEY_NAME --query 'Instances[0].InstanceId' --block-device-mappings '[ { "DeviceName": "/dev/sda1", "Ebs": { "DeleteOnTermination": true } } ]' | tr -d \") +# The AMI ID may change in the future and the way to look it up is with the +# following command, which takes a long time to run: +# +# aws ec2 describe-images --owners 'aws-marketplace' --filters 'Name=product-code,Values=aw0evgkw8e5c1q413zgy5pjce' --query 'sort_by(Images, &CreationDate)[-1].[ImageId]' --output 'text' +# +# To use this AMI, we subscribed to it from the AWS GUI. +# AMI IDs are specific to the region. +AMI_ID='ami-9887c6e7' +# Smaller than medium lead to Maven and Solr problems. +SIZE='t2.medium' +echo "Creating EC2 instance" +# TODO: Add some error checking for "ec2 run-instances". 
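# A minimal sketch of the error check the TODO above asks for, assuming the aws CLI
# exits non-zero and prints nothing useful to stdout when the call fails:
#
#   INSTANCE_ID=$(aws ec2 run-instances ... --query 'Instances[0].InstanceId' | tr -d \")
#   if [[ "$?" -ne 0 || -z "$INSTANCE_ID" ]]; then
#     echo "Could not create an EC2 instance. Check your AWS credentials, region, and AMI subscription." 1>&2
#     exit 1
#   fi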
+INSTANCE_ID=$(aws ec2 run-instances --image-id $AMI_ID --security-groups $SECURITY_GROUP --count 1 --instance-type $SIZE --key-name $KEY_NAME --query 'Instances[0].InstanceId' --block-device-mappings '[ { "DeviceName": "/dev/sda1", "Ebs": { "DeleteOnTermination": true } } ]' | tr -d \") echo "Instance ID: "$INSTANCE_ID -echo "*End creating EC2 instance" +echo "End creating EC2 instance" PUBLIC_DNS=$(aws ec2 describe-instances --instance-ids $INSTANCE_ID --query "Reservations[*].Instances[*].[PublicDnsName]" --output text) PUBLIC_IP=$(aws ec2 describe-instances --instance-ids $INSTANCE_ID --query "Reservations[*].Instances[*].[PublicIpAddress]" --output text) @@ -91,6 +94,7 @@ echo "ssh -i $PEM_FILE $USER_AT_HOST" echo "Please wait at least 15 minutes while the branch \"$BRANCH_NAME\" from $REPO_URL is being deployed." +# TODO: Add some error checking for this ssh command. ssh -T -i $PEM_FILE -o 'StrictHostKeyChecking no' -o 'UserKnownHostsFile=/dev/null' -o 'ConnectTimeout=300' $USER_AT_HOST < Date: Fri, 21 Sep 2018 17:43:54 -0400 Subject: [PATCH 075/114] Doc fix and remove unneeded install #4990 --- doc/sphinx-guides/source/developers/deployment.rst | 4 ++-- scripts/installer/ec2-create-instance.sh | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/developers/deployment.rst b/doc/sphinx-guides/source/developers/deployment.rst index c1321a40330..87a81c05fc9 100755 --- a/doc/sphinx-guides/source/developers/deployment.rst +++ b/doc/sphinx-guides/source/developers/deployment.rst @@ -10,7 +10,7 @@ Developers often only deploy Dataverse to their :doc:`dev-environment` but it ca Deploying Dataverse to Amazon Web Services (AWS) ------------------------------------------------ -We have written some scripts to deploy Dataverse to Amazon Web Services (AWS) but they require some setup. +We have written scripts to deploy Dataverse to Amazon Web Services (AWS) but they require some setup. Install AWS CLI ~~~~~~~~~~~~~~~ @@ -60,7 +60,7 @@ You must specify the branch with ``-b`` but you can also specify a non-IQSS git ``bash ~/Downloads/ec2-create-instance.sh -b develop -r https://github.com/scholarsportal/dataverse.git`` -Now you will need to wait at least 5 or 10 minutes until the deployment is finished. Eventually, the output should tell you how to access the installation of Dataverse in a web browser or via ssh. It will also provide instructions on how to delete the instance when you are finished with it. Please be aware that AWS charges per minute for a running instance. You can also delete your instance from https://console.aws.amazon.com/console/home?region=us-east-1 . +Now you will need to wait around 10 minutes until the deployment is finished. Eventually, the output should tell you how to access the installation of Dataverse in a web browser or via ssh. It will also provide instructions on how to delete the instance when you are finished with it. Please be aware that AWS charges per minute for a running instance. You can also delete your instance from https://console.aws.amazon.com/console/home?region=us-east-1 . Caveats ~~~~~~~ diff --git a/scripts/installer/ec2-create-instance.sh b/scripts/installer/ec2-create-instance.sh index 687e271c405..97f96b47716 100755 --- a/scripts/installer/ec2-create-instance.sh +++ b/scripts/installer/ec2-create-instance.sh @@ -92,11 +92,10 @@ USER_AT_HOST="centos@${PUBLIC_DNS}" echo "New instance created with ID \"$INSTANCE_ID\". 
To ssh into it:" echo "ssh -i $PEM_FILE $USER_AT_HOST" -echo "Please wait at least 15 minutes while the branch \"$BRANCH_NAME\" from $REPO_URL is being deployed." +echo "Please wait at least 10 minutes while the branch \"$BRANCH_NAME\" from $REPO_URL is being deployed." # TODO: Add some error checking for this ssh command. ssh -T -i $PEM_FILE -o 'StrictHostKeyChecking no' -o 'UserKnownHostsFile=/dev/null' -o 'ConnectTimeout=300' $USER_AT_HOST < Date: Fri, 21 Sep 2018 17:57:50 -0400 Subject: [PATCH 076/114] 15 minutes and add back epel #4990 --- doc/sphinx-guides/source/developers/deployment.rst | 2 +- scripts/installer/ec2-create-instance.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/sphinx-guides/source/developers/deployment.rst b/doc/sphinx-guides/source/developers/deployment.rst index 87a81c05fc9..4d0aad44020 100755 --- a/doc/sphinx-guides/source/developers/deployment.rst +++ b/doc/sphinx-guides/source/developers/deployment.rst @@ -60,7 +60,7 @@ You must specify the branch with ``-b`` but you can also specify a non-IQSS git ``bash ~/Downloads/ec2-create-instance.sh -b develop -r https://github.com/scholarsportal/dataverse.git`` -Now you will need to wait around 10 minutes until the deployment is finished. Eventually, the output should tell you how to access the installation of Dataverse in a web browser or via ssh. It will also provide instructions on how to delete the instance when you are finished with it. Please be aware that AWS charges per minute for a running instance. You can also delete your instance from https://console.aws.amazon.com/console/home?region=us-east-1 . +Now you will need to wait around 15 minutes until the deployment is finished. Eventually, the output should tell you how to access the installation of Dataverse in a web browser or via ssh. It will also provide instructions on how to delete the instance when you are finished with it. Please be aware that AWS charges per minute for a running instance. You can also delete your instance from https://console.aws.amazon.com/console/home?region=us-east-1 . Caveats ~~~~~~~ diff --git a/scripts/installer/ec2-create-instance.sh b/scripts/installer/ec2-create-instance.sh index 97f96b47716..b46fb09e3ff 100755 --- a/scripts/installer/ec2-create-instance.sh +++ b/scripts/installer/ec2-create-instance.sh @@ -92,11 +92,11 @@ USER_AT_HOST="centos@${PUBLIC_DNS}" echo "New instance created with ID \"$INSTANCE_ID\". To ssh into it:" echo "ssh -i $PEM_FILE $USER_AT_HOST" -echo "Please wait at least 10 minutes while the branch \"$BRANCH_NAME\" from $REPO_URL is being deployed." +echo "Please wait at least 15 minutes while the branch \"$BRANCH_NAME\" from $REPO_URL is being deployed." # TODO: Add some error checking for this ssh command. ssh -T -i $PEM_FILE -o 'StrictHostKeyChecking no' -o 'UserKnownHostsFile=/dev/null' -o 'ConnectTimeout=300' $USER_AT_HOST < Date: Fri, 21 Sep 2018 18:48:13 -0400 Subject: [PATCH 077/114] Install epel-release before, add comment #4990 --- scripts/installer/ec2-create-instance.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/installer/ec2-create-instance.sh b/scripts/installer/ec2-create-instance.sh index b46fb09e3ff..fe84c115f84 100755 --- a/scripts/installer/ec2-create-instance.sh +++ b/scripts/installer/ec2-create-instance.sh @@ -94,9 +94,11 @@ echo "ssh -i $PEM_FILE $USER_AT_HOST" echo "Please wait at least 15 minutes while the branch \"$BRANCH_NAME\" from $REPO_URL is being deployed." 
+# epel-release is installed first to ensure the latest ansible is installed after # TODO: Add some error checking for this ssh command. ssh -T -i $PEM_FILE -o 'StrictHostKeyChecking no' -o 'UserKnownHostsFile=/dev/null' -o 'ConnectTimeout=300' $USER_AT_HOST < Date: Mon, 24 Sep 2018 10:02:18 -0400 Subject: [PATCH 078/114] not the spin up script from Installation Guide #4990 Also put Ansible before Puppet since dataverse-puppet hasn't been updated since March 2016. --- doc/sphinx-guides/source/installation/prep.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/installation/prep.rst b/doc/sphinx-guides/source/installation/prep.rst index 959784b1ee4..37a8a2917b1 100644 --- a/doc/sphinx-guides/source/installation/prep.rst +++ b/doc/sphinx-guides/source/installation/prep.rst @@ -38,10 +38,12 @@ Installing Dataverse involves some system configuration followed by executing an Advanced Installation +++++++++++++++++++++ -There are some community-lead projects to use configuration management tools such as Puppet and Ansible to automate Dataverse installation and configuration, but support for these solutions is limited to what the Dataverse community can offer as described in each project's webpage: +There are some community-lead projects to use configuration management tools such as Ansible and Puppet to automate Dataverse installation and configuration, but support for these solutions is limited to what the Dataverse community can offer as described in each project's webpage: -- https://github.com/IQSS/dataverse-puppet - https://github.com/IQSS/dataverse-ansible +- https://github.com/IQSS/dataverse-puppet + +(Please note that the "dataverse-ansible" repo is used in a script that allows Dataverse to be installed on Amazon Web Services (AWS) from arbitrary GitHub branches as described in the :doc:`/developers/deployment` section of the Developer Guide.) The Dataverse development team is happy to "bless" additional community efforts along these lines (i.e. Docker, Chef, Salt, etc.) by creating a repo under https://github.com/IQSS and managing team access. 
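For reference, the same "dataverse-ansible" playbook that the EC2 script drives over ssh can also be run by hand on a fresh CentOS 7 host. The sketch below only mirrors the steps the script performs; the clone directory name "dataverse" is inferred from the inventory and playbook paths the script uses, and the branch and repo values are placeholders:

    sudo yum -y install epel-release
    sudo yum -y install ansible git
    git clone https://github.com/IQSS/dataverse-ansible.git dataverse
    export ANSIBLE_ROLES_PATH=.
    ansible-playbook -i dataverse/inventory dataverse/dataverse.pb --connection=local \
      --extra-vars "branch=develop repo=https://github.com/IQSS/dataverse.git"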
From 2fa7953fe96b0e4860178313efeb5518a94989fd Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 24 Sep 2018 11:06:20 -0400 Subject: [PATCH 079/114] #3243 Force re-index of Dataset when file is added to existing draft --- .../java/edu/harvard/iq/dataverse/EditDatafilesPage.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 07b58e25f1f..29829023cd7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -19,6 +19,7 @@ import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.ingest.IngestUtil; import edu.harvard.iq.dataverse.search.FileView; +import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.JsfHelper; @@ -109,6 +110,8 @@ public enum FileEditMode { SystemConfig systemConfig; @EJB DataverseLinkingServiceBean dvLinkingService; + @EJB + IndexServiceBean indexService; @Inject DataverseRequestServiceBean dvRequestService; @Inject PermissionsWrapper permissionsWrapper; @@ -1363,7 +1366,7 @@ public String save() { //if (newDraftVersion) { // return returnToDraftVersionById(); //} - + indexService.indexDataset(dataset, true); logger.fine("Redirecting to the dataset page, from the edit/upload page."); return returnToDraftVersion(); } From 8c431334038987ae84ca89f9872f269010cf2371 Mon Sep 17 00:00:00 2001 From: Michael Heppler Date: Mon, 24 Sep 2018 12:16:13 -0400 Subject: [PATCH 080/114] Removed hardcoded text from java and added bundle reference [ref #5067] --- src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index b0a72b75e3c..f18a7442ac4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -244,7 +244,7 @@ public static String getFacetFileType(DataFile dataFile) { } } - return "Unknown"; + return ResourceBundle.getBundle("MimeTypeFacets").getString("application/octet-stream"); } public static String getUserFriendlyOriginalType(DataFile dataFile) { From c17c979aecd52539bce61d0a98f973a813f132c2 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 24 Sep 2018 13:59:55 -0400 Subject: [PATCH 081/114] #5085 Fix Failing Test --- .../java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index d79ef83d287..a080a078e4d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1426,6 +1426,13 @@ public void testCreateDeleteDatasetLink() { // This should fail, because we are attempting to link the dataset // to its own dataverse: + Response publishTargetDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishTargetDataverse.prettyPrint(); + publishTargetDataverse.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response publishDatasetForLinking = 
UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + Response createLinkingDatasetResponse = UtilIT.createDatasetLink(datasetId.longValue(), dataverseAlias, apiToken); createLinkingDatasetResponse.prettyPrint(); createLinkingDatasetResponse.then().assertThat() @@ -1436,7 +1443,7 @@ public void testCreateDeleteDatasetLink() { createDataverseResponse = UtilIT.createRandomDataverse(apiToken); createDataverseResponse.prettyPrint(); dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - + publishTargetDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); // And link the dataset to this new dataverse: createLinkingDatasetResponse = UtilIT.createDatasetLink(datasetId.longValue(), dataverseAlias, apiToken); From 3ad5e4bb2ba8264cf0d9c2d1d3c7794b874415aa Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 24 Sep 2018 14:39:24 -0400 Subject: [PATCH 082/114] #5085 CR Cleanup --- .../harvard/iq/dataverse/api/DatasetsIT.java | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index a080a078e4d..94b207969b6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1426,13 +1426,16 @@ public void testCreateDeleteDatasetLink() { // This should fail, because we are attempting to link the dataset // to its own dataverse: - Response publishTargetDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + Response publishTargetDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); publishTargetDataverse.prettyPrint(); - publishTargetDataverse.then().assertThat() - .statusCode(OK.getStatusCode()); - + publishTargetDataverse.then().assertThat() + .statusCode(OK.getStatusCode()); + Response publishDatasetForLinking = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); - + publishDatasetForLinking.prettyPrint(); + publishDatasetForLinking.then().assertThat() + .statusCode(OK.getStatusCode()); + Response createLinkingDatasetResponse = UtilIT.createDatasetLink(datasetId.longValue(), dataverseAlias, apiToken); createLinkingDatasetResponse.prettyPrint(); createLinkingDatasetResponse.then().assertThat() @@ -1443,7 +1446,10 @@ public void testCreateDeleteDatasetLink() { createDataverseResponse = UtilIT.createRandomDataverse(apiToken); createDataverseResponse.prettyPrint(); dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - publishTargetDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishTargetDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDatasetForLinking.prettyPrint(); + publishTargetDataverse.then().assertThat() + .statusCode(OK.getStatusCode()); // And link the dataset to this new dataverse: createLinkingDatasetResponse = UtilIT.createDatasetLink(datasetId.longValue(), dataverseAlias, apiToken); From 23fa452568af62fb9d070072ab893ad9238da71e Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 24 Sep 2018 16:27:38 -0400 Subject: [PATCH 083/114] #5078 Update BundleUtil logic for absent language directory --- .../harvard/iq/dataverse/util/BundleUtil.java | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java index 
2b9b428af3e..4571a410aa5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java @@ -23,22 +23,19 @@ public static String getStringFromBundle(String key) { public static String getStringFromBundle(String key, List arguments) { DataverseLocaleBean d = new DataverseLocaleBean(); - ResourceBundle bundle ; - bundle_locale= new Locale(d.getLocaleCode()); + ResourceBundle bundle; + bundle_locale = new Locale(d.getLocaleCode()); + File bundleFileDir = null; String filesRootDirectory = System.getProperty("dataverse.lang.directory"); - if (filesRootDirectory == null || filesRootDirectory.isEmpty()) { - filesRootDirectory = "/tmp/lang"; + + if (filesRootDirectory != null && !filesRootDirectory.isEmpty()) { + bundleFileDir = new File(filesRootDirectory); } - File bundleFileDir = new File(filesRootDirectory); - - if (!bundleFileDir.exists()) - { + if (bundleFileDir == null || !bundleFileDir.exists()) { bundle = ResourceBundle.getBundle(defaultBundleFile, bundle_locale); - } - else { - + } else { URL[] urls = null; try { urls = new URL[]{bundleFileDir.toURI().toURL()}; From 3d21f60b95a63ec700abaaf52fae28af392316fd Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 24 Sep 2018 18:15:50 -0400 Subject: [PATCH 084/114] the installer extras - DataCite in the default config (instead of EZID), the ability to enter a non-default port for the smtp server. (#4966) --- .../source/installation/config.rst | 8 ++++- scripts/api/setup-all.sh | 2 +- scripts/installer/glassfish-setup.sh | 9 +++--- scripts/installer/install | 29 +++++++++++++++---- 4 files changed, 37 insertions(+), 11 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index e5449d5f6c0..8d5f0a10ee7 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -102,7 +102,13 @@ Persistent Identifiers and Publishing Datasets Persistent identifiers are a required and integral part of the Dataverse platform. They provide a URL that is guaranteed to resolve to the datasets or files they represent. Dataverse currently supports creating identifiers using DOI and Handle. -By default and for testing convenience, the installer configures a temporary DOI test namespace through EZID. This is sufficient to create and publish datasets and files, but they are not citable nor guaranteed to be preserved. Note that any datasets or files created using the test configuration cannot be directly migrated and would need to be created again once a valid DOI namespace is configured. +By default, the installer configures a test DOI namespace (10.5072) with DataCite as the registration provider. Please note that as of the release 4.9.3, we can no longer use EZID as the provider. Unlike EZID, DataCite requires that you register for a test account (please contact support@datacite.org). Once you receive the login name and password for the account, configure it in your domain.xml, as the following two JVM options:: + + -Ddoi.username=... + -Ddoi.password=... + + +and restart glassfish. Once this is done, you will be able to publish datasets and files, but they will not be citable, or guaranteed to be preserved. Note that any datasets or files created using the test configuration cannot be directly migrated and would need to be created again once a valid DOI namespace is configured. 
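(If you prefer not to edit domain.xml by hand, the same two options can be added with asadmin; this is only a sketch, so substitute the credentials DataCite sends you and adjust the asadmin path for your Glassfish installation:

    ./asadmin create-jvm-options '-Ddoi.username=YOUR_DATACITE_USERNAME'
    ./asadmin create-jvm-options '-Ddoi.password=YOUR_DATACITE_PASSWORD'
    ./asadmin restart-domain
)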
To properly configure persistent identifiers for a production installation, an account and associated namespace must be acquired for a fee from a DOI or HDL provider: **EZID** (http://ezid.cdlib.org), **DataCite** (https://www.datacite.org), **Handle.Net** (https://www.handle.net). diff --git a/scripts/api/setup-all.sh b/scripts/api/setup-all.sh index 03cb2252f2c..b5b36516806 100755 --- a/scripts/api/setup-all.sh +++ b/scripts/api/setup-all.sh @@ -51,7 +51,7 @@ curl -X PUT -d /dataverseuser.xhtml?editMode=CREATE "$SERVER/admin/settings/:Sig curl -X PUT -d doi "$SERVER/admin/settings/:Protocol" curl -X PUT -d 10.5072 "$SERVER/admin/settings/:Authority" curl -X PUT -d "FK2/" "$SERVER/admin/settings/:Shoulder" -curl -X PUT -d EZID "$SERVER/admin/settings/:DoiProvider" +curl -X PUT -d DataCite "$SERVER/admin/settings/:DoiProvider" curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY curl -X PUT -d localhost-only $SERVER/admin/settings/:BlockedApiPolicy echo diff --git a/scripts/installer/glassfish-setup.sh b/scripts/installer/glassfish-setup.sh index 1a8d5785a3a..d37eeb850fa 100755 --- a/scripts/installer/glassfish-setup.sh +++ b/scripts/installer/glassfish-setup.sh @@ -66,10 +66,11 @@ function preliminary_setup() # password reset token timeout in minutes ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.auth.password-reset-timeout-in-minutes=60" - # EZID DOI Settings - ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.password=apitest" - ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.username=apitest" - ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.baseurlstring=https\://ezid.cdlib.org" + # DataCite DOI Settings + # (we can no longer offer EZID with their shared test account) + #./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.password=apitest" + #./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.username=apitest" + ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.baseurlstring=https\://mds.test.datacite.org" ./asadmin $ASADMIN_OPTS create-jvm-options "-Ddataverse.timerServer=true" # enable comet support diff --git a/scripts/installer/install b/scripts/installer/install index 6a0bca21ab8..a66cf950432 100755 --- a/scripts/installer/install +++ b/scripts/installer/install @@ -91,7 +91,7 @@ my %CONFIG_DEFAULTS = ( 'GLASSFISH_DIRECTORY', '/usr/local/glassfish4', 'GLASSFISH_USER', '', 'ADMIN_EMAIL', '', - 'MAIL_SERVER', 'mail.hmdc.harvard.edu', + 'MAIL_SERVER', 'mail.hmdc.harvard.edu:25', 'POSTGRES_ADMIN_PASSWORD', 'secret', 'POSTGRES_SERVER', '127.0.0.1', @@ -114,7 +114,7 @@ my %CONFIG_PROMPTS = ( 'GLASSFISH_USER', 'Glassfish service account username', 'GLASSFISH_DIRECTORY', 'Glassfish Directory', 'ADMIN_EMAIL', 'Administrator email address for this Dataverse', - 'MAIL_SERVER', 'SMTP (mail) server to relay notification messages', + 'MAIL_SERVER', 'SMTP (mail) server (and port) to relay notification messages', 'POSTGRES_SERVER', 'Postgres Server Address', 'POSTGRES_PORT', 'Postgres Server Port', @@ -725,6 +725,21 @@ if ($WARFILE_LOCATION =~/([0-9]\.[0-9]\.[0-9])\.war$/) } +print "\nYour Dataverse has been configured to use DataCite, to register DOI global identifiers in the \n"; +print "test name space \"10.5072\" with the \"shoulder\" \"FK2\"\n"; +print "However, you have to contact DataCite (support\@datacite.org) and request a test account, before you \n"; +print "can publish datasets. 
Once you receive the account name and password, add them to your domain.xml,\n"; +print "as the following two JVM options:\n"; +print "\t-Ddoi.username=...\n"; +print "\t-Ddoi.password=...\n"; +print "and restart glassfish\n"; +print "If this is a production Dataverse and you are planning to register datasets as \n"; +print "\"real\", non-test DOIs or Handles, consult the \"Persistent Identifiers and Publishing Datasets\"\n"; +print "section of the Installataion guide, on how to configure your Dataverse with the proper registration\n"; +print "credentials.\n\n"; + + + # (going to skip the Rserve check; it's no longer a required, or even a recommended component) exit 0; @@ -1104,14 +1119,18 @@ sub validate_smtp_server { $mail_server_status = 1; - unless ( $mail_server_iaddr = inet_aton( $CONFIG_DEFAULTS{'MAIL_SERVER'} ) ) { - print STDERR "Could not look up $CONFIG_DEFAULTS{'MAIL_SERVER'},\n"; + my $userentry = $CONFIG_DEFAULTS{'MAIL_SERVER'}; + my ($testserver, $testport) = split (":", $userentry); + + unless ( $mail_server_iaddr = inet_aton( $testserver ) ) { + print STDERR "Could not look up $testserver,\n"; print STDERR "the host you specified as your mail server\n"; $mail_server_status = 0; } if ($mail_server_status) { - my $mail_server_paddr = sockaddr_in( 25, $mail_server_iaddr ); + $testport = 25 unless $testport; + my $mail_server_paddr = sockaddr_in( $testport, $mail_server_iaddr ); $mail_server_proto = getprotobyname('tcp'); unless ( socket( SOCK, PF_INET, SOCK_STREAM, $mail_server_proto ) From 9ce1b3ab511fcab1f6c3e4166e14c8b0dfbbfbf5 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 24 Sep 2018 19:07:04 -0400 Subject: [PATCH 085/114] Revert "document quick fix for new devs #5078" This reverts commit bc92d0e955fbd472e66a0d44ff3dc388b6f05768. --- .../source/developers/dev-environment.rst | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst index bcb79afa380..fc1d99fc289 100755 --- a/doc/sphinx-guides/source/developers/dev-environment.rst +++ b/doc/sphinx-guides/source/developers/dev-environment.rst @@ -144,16 +144,6 @@ To install Solr, execute the following commands: ``bin/solr create_core -c collection1 -d server/solr/collection1/conf`` -Quick Fix for Bundle Problem ----------------------------- - -Please see https://github.com/IQSS/dataverse/issues/5078 for what this is about. We hope to remove this quick fix soon. - -``mkdir /tmp/lang`` - -``cp src/main/java/Bundle.properties /tmp/lang`` - - Run the Dataverse Installer Script ---------------------------------- From b31ed6c1c492ce2c5fb1c451c36e0ed9944dc221 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 19 Sep 2018 16:37:01 +0200 Subject: [PATCH 086/114] Fixes #2940. Upgrading to use JUnit 5 using Jupiter 5.3 Engine combined with JUnit 4.12 Vintage Engine. This allows us to have both versions active in parallel and make the migration easier. Fixed `IngestableDataCheckerTest.testTestSAVformat()` unit test, which was failing with 4.12. This was not an engine fault but a mistake within the test case itself. 
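A quick local check of the pom.xml changes below, assuming a standard Maven install, is simply to run the unit test suite and then the fixed test on its own:

    mvn clean test
    mvn test -Dtest=IngestableDataCheckerTest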
--- pom.xml | 24 +++++++++++++++++-- .../ingest/IngestableDataCheckerTest.java | 4 ++-- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 188959f80f5..d636db874e3 100644 --- a/pom.xml +++ b/pom.xml @@ -17,6 +17,11 @@ UTC en US + + 4.12 + 5.3.1 + 5.3.1 + 1.3.1 @@ -66,12 +71,27 @@ passay 1.1.0 + + org.junit.jupiter + junit-jupiter-api + ${junit.jupiter.version} + test + junit junit - 4.8.1 + ${junit.version} test - jar + + + org.junit.jupiter + junit-jupiter-engine + ${junit.jupiter.version} + + + org.junit.vintage + junit-vintage-engine + ${junit.vintage.version} org.glassfish diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestableDataCheckerTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestableDataCheckerTest.java index 2c69393963f..ea9e378739b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestableDataCheckerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestableDataCheckerTest.java @@ -165,7 +165,7 @@ public void testTestSAVformat() throws IOException { assertEquals(result, "application/x-spss-sav"); msgt("(2b) File is empty string"); - buff = createTempFileAndGetBuffer("testNotSAV.txt", ""); + buff = createTempFileAndGetBuffer("testNotSAV-empty.txt", ""); instance = new IngestableDataChecker(); result = instance.testSAVformat(buff); @@ -173,7 +173,7 @@ public void testTestSAVformat() throws IOException { assertEquals(result, null); msgt("(2c) File is non-SAV string"); - buff = createTempFileAndGetBuffer("testNotSAV.txt", "i-am-not-a-x-spss-sav-file"); + buff = createTempFileAndGetBuffer("testNotSAV-string.txt", "i-am-not-a-x-spss-sav-file"); instance = new IngestableDataChecker(); result = instance.testSAVformat(buff); msg("result 2c: " + result); From 792dd1c2750f903d4b3d6670a76299c21c9ab711 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 20 Sep 2018 13:47:09 +0200 Subject: [PATCH 087/114] Hotfixing #5061 that broke coverage reports because of missing '@{argLine}' in surefire plugin argLine option. --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index d636db874e3..dd7d2653877 100644 --- a/pom.xml +++ b/pom.xml @@ -658,7 +658,7 @@ ${testsToExclude} - -Duser.timezone=${project.timezone} -Dfile.encoding=${project.build.sourceEncoding} -Duser.language=${project.language} -Duser.region=${project.region} + ${argLine} -Duser.timezone=${project.timezone} -Dfile.encoding=${project.build.sourceEncoding} -Duser.language=${project.language} -Duser.region=${project.region} From 82ed295bfb018feef8a4941fd00b8e8b980b2dff Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 20 Sep 2018 14:07:04 +0200 Subject: [PATCH 088/114] Update Mockito from 1.10 to 2.22 to be up-to-date and enable proper JUnit 5 support. 
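Because transitive dependencies can pin older versions, one quick way to confirm which JUnit and Mockito artifacts Maven actually resolves after these version bumps (a sketch, assuming a standard Maven setup) is:

    mvn dependency:tree | grep -E 'junit|mockito'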
--- pom.xml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index dd7d2653877..a809c635e30 100644 --- a/pom.xml +++ b/pom.xml @@ -374,11 +374,12 @@ org.slf4j slf4j-log4j12 1.7.7 - + org.mockito mockito-core - 1.10.19 + 2.22.0 + test + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/doc/sphinx-guides/source/developers/coding-style.rst b/doc/sphinx-guides/source/developers/coding-style.rst index c7b96028d5e..5ae98996adc 100755 --- a/doc/sphinx-guides/source/developers/coding-style.rst +++ b/doc/sphinx-guides/source/developers/coding-style.rst @@ -59,10 +59,20 @@ Format Code You Changed with Netbeans As you probably gathered from the :doc:`dev-environment` section, IQSS has standardized on Netbeans. It is much appreciated when you format your code (but only the code you touched!) using the out-of-the-box Netbeans configuration. If you have created an entirely new Java class, you can just click Source -> Format. If you are adjusting code in an existing class, highlight the code you changed and then click Source -> Format. Keeping the "diff" in your pull requests small makes them easier to code review. -The Netbeans formatting syntax appears not to be documented anywhere, however from an initial approximation `astyle --mode=java --style=attach --add-braces ${source_file}` is reasonably close. -If `astyle` is not installed on your system, it is available from ``_. +Checking Your Formatting With Checkstyle +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -We would like to someday automate the detection and possibly correction of code that hasn't been formatted using our house style (the default Netbeans style). We've heard that https://maven.apache.org/plugins/maven-checkstyle-plugin/ can do this but we would be happy to see a pull request in this area, especially if it also hooks up to our builds at https://travis-ci.org/IQSS/dataverse . +The easiest way to adopt Dataverse coding style is to use Netbeans as your IDE, avoid change the default Netbeans formatting settings, and only reformat code you've changed, as described above. + +If you do not use Netbeans, you are encouraged to check the formatting of your code using Checkstyle. 
+ +To check the entire project: + +``mvn checkstyle:checkstyle`` + +To check a single file: + +``mvn checkstyle:checkstyle -Dcheckstyle.includes=**\/SystemConfig*.java`` Logging ~~~~~~~ diff --git a/pom.xml b/pom.xml index 188959f80f5..1008cea0bfe 100644 --- a/pom.xml +++ b/pom.xml @@ -641,6 +641,16 @@ -Duser.timezone=${project.timezone} -Dfile.encoding=${project.build.sourceEncoding} -Duser.language=${project.language} -Duser.region=${project.region} + + org.apache.maven.plugins + maven-checkstyle-plugin + 3.0.0 + + checkstyle.xml + UTF-8 + true + + From 50060e045ca6bbf1dec9d8fdc77f18303416d803 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 27 Sep 2018 14:13:40 -0400 Subject: [PATCH 105/114] #5107 disable migrate handle test --- src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java index b7cfb5d291f..58a6d17bd78 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java @@ -26,6 +26,7 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import org.junit.Ignore; public class AdminIT { @@ -511,7 +512,14 @@ public void testFindPermissonsOn() { } @Test + @Ignore public void testMigrateHDLToDOI() { + /* + This test is set to ignore because it requires a setup that will + mint both handles and doi identifiers + Can re-enable when if test environments are running handle servers. + SEK 09/27/2018 + */ Response createUser = UtilIT.createRandomUser(); createUser.prettyPrint(); From 75f4720dc903a565fd7decd8578ed2bd9148a20f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 27 Sep 2018 15:18:26 -0400 Subject: [PATCH 106/114] clarify that EZID is no longer the default #5109 --- .../source/developers/troubleshooting.rst | 2 +- .../source/installation/config.rst | 24 +++++++------------ 2 files changed, 9 insertions(+), 17 deletions(-) diff --git a/doc/sphinx-guides/source/developers/troubleshooting.rst b/doc/sphinx-guides/source/developers/troubleshooting.rst index a1b5397aaf1..fef99f48a67 100755 --- a/doc/sphinx-guides/source/developers/troubleshooting.rst +++ b/doc/sphinx-guides/source/developers/troubleshooting.rst @@ -101,7 +101,7 @@ You may also find https://github.com/IQSS/dataverse/blob/develop/scripts/deploy/ DataCite -------- -If you've reconfigured from EZID to DataCite and are seeing ``Response code: 400, [url] domain of URL is not allowed`` it's probably because your ``dataverse.siteUrl`` JVM option is unset or set to localhost (``-Ddataverse.siteUrl=http://localhost:8080``). You can try something like this: +If you are seeing ``Response code: 400, [url] domain of URL is not allowed`` it's probably because your ``dataverse.siteUrl`` JVM option is unset or set to localhost (``-Ddataverse.siteUrl=http://localhost:8080``). You can try something like this: ``./asadmin delete-jvm-options '-Ddataverse.siteUrl=http\://localhost\:8080'`` diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 56ef31b5bcf..e6b09ff1f25 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -109,7 +109,7 @@ By default, the installer configures a test DOI namespace (10.5072) with DataCit and restart Glassfish. 
Once this is done, you will be able to publish datasets and files, but the persistent identifiers will not be citable or guaranteed to be preserved. Note that any datasets or files created using the test configuration cannot be directly migrated and would need to be created again once a valid DOI namespace is configured. -To properly configure persistent identifiers for a production installation, an account and associated namespace must be acquired for a fee from a DOI or HDL provider: **EZID** (http://ezid.cdlib.org), **DataCite** (https://www.datacite.org), **Handle.Net** (https://www.handle.net). +To properly configure persistent identifiers for a production installation, an account and associated namespace must be acquired for a fee from a DOI or HDL provider. **DataCite** (https://www.datacite.org) is the recommended DOI provider (see https://dataverse.org/global-dataverse-community-consortium for more on joining DataCite) but **EZID** (http://ezid.cdlib.org) is an option for the University of California according to https://www.cdlib.org/cdlinfo/2017/08/04/ezid-doi-service-is-evolving/ . **Handle.Net** (https://www.handle.net) is the HDL provider. Once you have your DOI or Handle account credentials and a namespace, configure Dataverse to use them using the JVM options and database settings below. @@ -549,17 +549,17 @@ For limiting the size (in bytes) of thumbnail images generated from files. doi.baseurlstring +++++++++++++++++ -As of this writing, "https://ezid.cdlib.org" (EZID) and "https://mds.datacite.org" (DataCite) are the main valid values. +As of this writing, "https://mds.datacite.org" (DataCite) and "https://ezid.cdlib.org" (EZID) are the main valid values. While the above two options are recommended because they have been tested by the Dataverse team, it is also possible to use a DataCite Client API as a proxy to DataCite. In this case, requests made to the Client API are captured and passed on to DataCite for processing. The application will interact with the DataCite Client API exactly as if it were interacting directly with the DataCite API, with the only difference being the change to the base endpoint URL. -For example, the Australian Data Archive (ADA) successfully uses the Australian National Data Service (ANDS) API (a proxy for DataCite) to mint their DOIs through Dataverse using a ``doi.baseurlstring`` value of "https://researchdata.ands.org.au/api/doi/datacite" as documented at https://documentation.ands.org.au/display/DOC/ANDS+DataCite+Client+API . As ADA did for ANDS DOI minting, any DOI provider (and their corresponding DOI configuration parameters) other than DataCite and EZID must be tested with Dataverse to establish whether or not it will function properly. +For example, the Australian Data Archive (ADA) successfully uses the Australian National Data Service (ANDS) API (a proxy for DataCite) to mint their DOIs through Dataverse using a ``doi.baseurlstring`` value of "https://researchdata.ands.org.au/api/doi/datacite" as documented at https://documentation.ands.org.au/display/DOC/ANDS+DataCite+Client+API . As ADA did for ANDS DOI minting, any DOI provider (and their corresponding DOI configuration parameters) other than DataCite must be tested with Dataverse to establish whether or not it will function properly. -Out of the box, Dataverse is configured to use base URL string from EZID. You can delete it like this: +Out of the box, Dataverse is configured to use a test MDS DataCite base URL string. 
You can delete it like this: -``./asadmin delete-jvm-options '-Ddoi.baseurlstring=https\://ezid.cdlib.org'`` +``./asadmin delete-jvm-options '-Ddoi.baseurlstring=https\://mds.test.datacite.org'`` -Then, to switch to DataCite, you can issue the following command: +Then, to switch to production DataCite, you can issue the following command: ``./asadmin create-jvm-options '-Ddoi.baseurlstring=https\://mds.datacite.org'`` @@ -577,10 +577,6 @@ doi.username Used in conjuction with ``doi.baseurlstring``. -Out of the box, Dataverse is configured with a test username from EZID. You can delete it with the following command: - -``./asadmin delete-jvm-options '-Ddoi.username=apitest'`` - Once you have a username from your provider, you can enter it like this: ``./asadmin create-jvm-options '-Ddoi.username=YOUR_USERNAME_HERE'`` @@ -590,12 +586,8 @@ Once you have a username from your provider, you can enter it like this: doi.password ++++++++++++ -Out of the box, Dataverse is configured with a test password from EZID. You can delete it with the following command: - Used in conjuction with ``doi.baseurlstring``. -``./asadmin delete-jvm-options '-Ddoi.password=apitest'`` - Once you have a password from your provider, you can enter it like this: ``./asadmin create-jvm-options '-Ddoi.password=YOUR_PASSWORD_HERE'`` @@ -742,9 +734,9 @@ By default the footer says "Copyright © [YYYY]" but you can add text after the :DoiProvider ++++++++++++ -As of this writing "EZID" and "DataCite" are the only valid options. DoiProvider is only needed if you are using DOI. +As of this writing "DataCite" and "EZID" are the only valid options. ``:DoiProvider`` is only needed if you are using DOI. -``curl -X PUT -d EZID http://localhost:8080/api/admin/settings/:DoiProvider`` +``curl -X PUT -d DataCite http://localhost:8080/api/admin/settings/:DoiProvider`` This setting relates to the ``:Protocol``, ``:Authority``, ``:Shoulder``, and ``:IdentifierGenerationStyle`` database settings below as well as the following JVM options: From 1ff3a255d3b08d8a6446d4b53c136847c50aa73c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 27 Sep 2018 15:19:55 -0400 Subject: [PATCH 107/114] #5052 - fix doubling of explore button --- src/main/webapp/file-download-button-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index e24dc2eedd5..a08acdf74e8 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -51,7 +51,7 @@
- + From 6ca8617053fe1ad1ce55f01c5ffe111e3c70c111 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 27 Sep 2018 15:41:31 -0400 Subject: [PATCH 108/114] specify 4 spaces for Java #5075 We should add this to Checkstyle too. --- doc/sphinx-guides/source/developers/coding-style.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/coding-style.rst b/doc/sphinx-guides/source/developers/coding-style.rst index 5ae98996adc..1771e0cdd0f 100755 --- a/doc/sphinx-guides/source/developers/coding-style.rst +++ b/doc/sphinx-guides/source/developers/coding-style.rst @@ -16,7 +16,7 @@ Formatting Code Tabs vs. Spaces ^^^^^^^^^^^^^^^ -Don't use tabs. Use spaces. +Don't use tabs. Use 4 spaces. Braces Placement ^^^^^^^^^^^^^^^^ From 80334fc0e635a4d2b45e32f5b298056484556088 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 27 Sep 2018 16:04:25 -0400 Subject: [PATCH 109/114] remove confusing information at top #5075 --- checkstyle.xml | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/checkstyle.xml b/checkstyle.xml index 32f5769a9d7..5a864136fea 100644 --- a/checkstyle.xml +++ b/checkstyle.xml @@ -3,26 +3,6 @@ "-//Puppy Crawl//DTD Check Configuration 1.3//EN" "http://checkstyle.sourceforge.net/dtds/configuration_1_3.dtd"> - -
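If the eventual goal is to fail builds (for example on Travis) when the house style is violated, the plugin's "check" goal can be used instead of the report-only "checkstyle" goal shown in the Coding Style guide above. This is a sketch that assumes the same maven-checkstyle-plugin configuration added to pom.xml in these patches; by default the goal fails the build when violations at or above the configured severity are found:

    mvn checkstyle:check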