diff --git a/doc/release-notes/8191-require-toa-or-request-access.md b/doc/release-notes/8191-require-toa-or-request-access.md new file mode 100644 index 00000000000..d78161973d3 --- /dev/null +++ b/doc/release-notes/8191-require-toa-or-request-access.md @@ -0,0 +1,17 @@ +## Release Highlights + +### Terms of Access or Request Access Required for Restricted Files + +Beginning in this release, Restricted Files must have either Terms of Access or Request Access enabled. This change is to ensure that for each file in a Dataverse installation there is a clear path to get to the data, either through requesting access to the data or through the provided context about why requesting access is not enabled. + +In the "Notes for Dataverse Installation Administrators" section, we have provided a query to help proactively identify datasets that need to be updated. + +## Notes for Dataverse Installation Administrators + +### Identifying Datasets Requiring Terms of Access or Request Access Changes + +In support of the change to require either Terms of Access or Request Access for all Restricted Files, we have provided a query to identify datasets in your installation where at least one file has neither Terms of Access nor Request Access enabled: + +https://github.com/IQSS/dataverse/raw/develop/scripts/issues/8191/datasets_without_toa_or_request_access + +This will allow you to reach out to those dataset owners as appropriate. 
diff --git a/scripts/issues/8191/datasets_without_toa_or_request_access b/scripts/issues/8191/datasets_without_toa_or_request_access new file mode 100644 index 00000000000..c66722d5c8c --- /dev/null +++ b/scripts/issues/8191/datasets_without_toa_or_request_access @@ -0,0 +1,28 @@ +-- this query will identify datasets where at least one file does not have either terms of access or request access enabled, and will include owner information for those datasets + +select au.email, +concat(au.firstname, ' ', au.lastname), +concat('$SERVERNAME/dataset.xhtml?persistentId=doi:' , dvo.authority , '/' , dvo.identifier) +from roleassignment ra, dataverserole dvr, +authenticateduser au, dvobject dvo +where +au.useridentifier = rtrim(substring(ra.assigneeidentifier, 2, 100)) +and dvo.id = ra.definitionpoint_id +and +ra.role_id = dvr.id and +dvr.alias in ( + 'fullContributor', +'dsContributor', +'contributor', +'admin', +'curator' +) and +ra.definitionpoint_id in ( + select dvo.id from datasetversion v +join termsofuseandaccess ua on ua.id = v.termsofuseandaccess_id +join filemetadata fm on v.id = fm.datasetversion_id +join datafile f on f.id = fm.datafile_id +join dvobject dvo on v.dataset_id = dvo.id +where ua.fileaccessrequest = false and ua.termsofaccess isnull +and f.restricted = true +) \ No newline at end of file diff --git a/scripts/search/tests/data/dataset-finch1.jsonld b/scripts/search/tests/data/dataset-finch1.jsonld index be39c9f14b2..4a20b3e08ea 100644 --- a/scripts/search/tests/data/dataset-finch1.jsonld +++ b/scripts/search/tests/data/dataset-finch1.jsonld @@ -20,7 +20,7 @@ "http://schema.org/version": "DRAFT", "http://schema.org/name": "Darwin's Finches", "https://dataverse.org/schema/core#fileTermsOfAccess": { - "https://dataverse.org/schema/core#fileRequestAccess": false + "https://dataverse.org/schema/core#fileRequestAccess": true }, "http://schema.org/includedInDataCatalog": "Root" } \ No newline at end of file diff --git 
a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 32ec6bdcf99..c60ea7020bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -294,6 +294,7 @@ public void setVersions(List versions) { } private DatasetVersion createNewDatasetVersion(Template template, FileMetadata fmVarMet) { + DatasetVersion dsv = new DatasetVersion(); dsv.setVersionState(DatasetVersion.VersionState.DRAFT); dsv.setFileMetadatas(new ArrayList<>()); @@ -313,11 +314,11 @@ private DatasetVersion createNewDatasetVersion(Template template, FileMetadata f if (latestVersion.getDatasetFields() != null && !latestVersion.getDatasetFields().isEmpty()) { dsv.setDatasetFields(dsv.copyDatasetFields(latestVersion.getDatasetFields())); } - - if (latestVersion.getTermsOfUseAndAccess()!= null){ - dsv.setTermsOfUseAndAccess(latestVersion.getTermsOfUseAndAccess().copyTermsOfUseAndAccess()); - } - + /* + adding file metadatas here and updating terms + because the terms need to know about the files + in a pre-save validation SEK 12/6/2021 + */ for (FileMetadata fm : latestVersion.getFileMetadatas()) { FileMetadata newFm = new FileMetadata(); // TODO: @@ -348,6 +349,18 @@ private DatasetVersion createNewDatasetVersion(Template template, FileMetadata f dsv.getFileMetadatas().add(newFm); } + + if (latestVersion.getTermsOfUseAndAccess()!= null){ + TermsOfUseAndAccess terms = latestVersion.getTermsOfUseAndAccess().copyTermsOfUseAndAccess(); + terms.setDatasetVersion(dsv); + dsv.setTermsOfUseAndAccess(terms); + } else { + TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); + terms.setDatasetVersion(dsv); + terms.setLicense(null); + terms.setFileAccessRequest(true); + dsv.setTermsOfUseAndAccess(terms); + } } // I'm adding the version to the list so it will be persisted when diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java 
b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 48804fe70b9..a900eefb4ab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -138,6 +138,7 @@ import edu.harvard.iq.dataverse.search.SearchServiceBean; import edu.harvard.iq.dataverse.search.SearchUtil; import edu.harvard.iq.dataverse.search.SolrClientService; +import edu.harvard.iq.dataverse.util.FileMetadataUtil; import java.util.Comparator; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.impl.HttpSolrClient; @@ -391,6 +392,43 @@ public void setRsyncScript(String rsyncScript) { public String getRsyncScriptFilename() { return rsyncScriptFilename; } + + private Boolean hasValidTermsOfAccess = null; + + public Boolean isHasValidTermsOfAccess() { + //cache in page to limit processing + if (hasValidTermsOfAccess != null){ + return hasValidTermsOfAccess; + } else { + if (!isHasRestrictedFiles()){ + hasValidTermsOfAccess = true; + return hasValidTermsOfAccess; + } else { + hasValidTermsOfAccess = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getEditVersion().getTermsOfUseAndAccess(), null); + return hasValidTermsOfAccess; + } + } + } + + private Boolean hasRestrictedFiles = null; + + public Boolean isHasRestrictedFiles(){ + //cache in page to limit processing + if (hasRestrictedFiles != null){ + return hasRestrictedFiles; + } else { + hasRestrictedFiles = workingVersion.isHasRestrictedFile(); + return hasRestrictedFiles; + } + } + + public boolean getHasValidTermsOfAccess(){ + return isHasValidTermsOfAccess(); //HasValidTermsOfAccess + } + + public void setHasValidTermsOfAccess(boolean value){ + //dummy for ui + } private String thumbnailString = null; @@ -2015,7 +2053,8 @@ private String init(boolean initFull) { JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("dataset.message.label.fileAccess"), BundleUtil.getStringFromBundle("dataset.message.publicInstall")); } - 
+ setFileAccessRequest(workingVersion.getTermsOfUseAndAccess().isFileAccessRequest()); + setTermsOfAccess(workingVersion.getTermsOfUseAndAccess().getTermsOfAccess()); resetVersionUI(); // FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, "Add New Dataset", " - Enter metadata to create the dataset's citation. You can add more metadata about this dataset after it's created.")); @@ -2071,7 +2110,14 @@ private String init(boolean initFull) { previewTools = externalToolService.findFileToolsByType(ExternalTool.Type.PREVIEW); datasetExploreTools = externalToolService.findDatasetToolsByType(ExternalTool.Type.EXPLORE); rowsPerPage = 10; - + if (dataset.getId() != null && canUpdateDataset()) { + hasRestrictedFiles = workingVersion.isHasRestrictedFile(); + hasValidTermsOfAccess = isHasValidTermsOfAccess(); + if (!hasValidTermsOfAccess) { + String message = BundleUtil.getStringFromBundle("dataset.message.editMetadata.invalid.TOUA.message"); + JsfHelper.addWarningMessage(message); + } + } return null; } @@ -2435,17 +2481,15 @@ public void edit(EditMode editMode) { } workingVersion = dataset.getEditVersion(); clone = workingVersion.cloneDatasetVersion(); - if (editMode == EditMode.INFO) { - // ? 
- } else if (editMode == EditMode.FILE) { - // JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.message.editFiles")); - // FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, "Upload + Edit Dataset Files", " - You can drag and drop your files from your desktop, directly into the upload widget.")); - } else if (editMode.equals(EditMode.METADATA)) { + if (editMode.equals(EditMode.METADATA)) { datasetVersionUI = datasetVersionUI.initDatasetVersionUI(workingVersion, true); updateDatasetFieldInputLevels(); JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.message.editMetadata.label"), BundleUtil.getStringFromBundle("dataset.message.editMetadata.message")); //FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, "Edit Dataset Metadata", " - Add more metadata about your dataset to help others easily find it.")); } else if (editMode.equals(EditMode.LICENSE)){ + if(!isHasValidTermsOfAccess()){ + workingVersion.getTermsOfUseAndAccess().setFileAccessRequest(true); + } JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.message.editTerms.label"), BundleUtil.getStringFromBundle("dataset.message.editTerms.message")); //FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, "Edit Dataset License and Terms", " - Update your dataset's license and terms of use.")); } @@ -3235,15 +3279,17 @@ public List completeHostDataverseMenuList(String query) { public String restrictFiles(boolean restricted) throws CommandException { List filesToRestrict = new ArrayList(); - if (fileMetadataForAction != null) { filesToRestrict.add(fileMetadataForAction); } else { filesToRestrict = this.getSelectedFiles(); } - restrictFiles(filesToRestrict, restricted); - return save(); + if (editMode == EditMode.CREATE) { + return ""; + } else { + return save(); + } } private void 
restrictFiles(List filesToRestrict, boolean restricted) throws CommandException { @@ -3327,7 +3373,16 @@ public String deleteFiles() throws CommandException{ } deleteFiles(filesToDelete); - String retVal = save(); + String retVal; + + if (editMode == EditMode.CREATE) { + workingVersion.setFileMetadatas(new ArrayList<>()); + retVal = ""; + } else { + retVal = save(); + } + + //And delete them only after the dataset is updated for(Embargo emb: orphanedEmbargoes) { embargoService.deleteById(emb.getId(), ((AuthenticatedUser)session.getUser()).getUserIdentifier()); @@ -3362,32 +3417,12 @@ private void deleteFiles(List filesToDelete) { // So below we are deleting the metadata from the version; we are // NOT adding the file to the filesToBeDeleted list that will be // passed to the UpdateDatasetCommand. -- L.A. Aug 2017 - Iterator fmit = dataset.getEditVersion().getFileMetadatas().iterator(); - while (fmit.hasNext()) { - FileMetadata fmd = fmit.next(); - if (markedForDelete.getDataFile().getStorageIdentifier().equals(fmd.getDataFile().getStorageIdentifier())) { - // And if this is an image file that happens to be assigned - // as the dataset thumbnail, let's null the assignment here: - - if (fmd.getDataFile().equals(dataset.getThumbnailFile())) { - dataset.setThumbnailFile(null); - } - /* It should not be possible to get here if this file - is not in fact released! - so the code block below - is not needed. 
- //if not published then delete identifier - if (!fmd.getDataFile().isReleased()){ - try{ - commandEngine.submit(new DeleteDataFileCommand(fmd.getDataFile(), dvRequestService.getDataverseRequest())); - } catch (CommandException e){ - //this command is here to delete the identifier of unreleased files - //if it fails then a reserved identifier may still be present on the remote provider - } - } */ - fmit.remove(); - break; - } - } + + FileMetadataUtil.removeFileMetadataFromList(workingVersion.getFileMetadatas(), markedForDelete); + + FileMetadataUtil.removeDataFileFromList(newFiles, markedForDelete.getDataFile()); + FileUtil.deleteTempFile(markedForDelete.getDataFile(), dataset, ingestService); + } } @@ -3577,6 +3612,7 @@ public String save() { if (editMode != null) { if (editMode.equals(EditMode.CREATE)) { + // We allow users to upload files on Create: int nNewFiles = newFiles.size(); logger.fine("NEW FILES: "+nNewFiles); @@ -4506,12 +4542,23 @@ public void setCategoriesByName(List dummy){ } public void refreshTagsPopUp(){ + if(!isHasValidTermsOfAccess()){ + this.editMode = EditMode.LICENSE; + PrimeFaces.current().executeScript("PF('blockDatasetForm').show()"); + PrimeFaces.current().executeScript("PF('accessPopup').show()"); + JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.message.editTerms.label"), BundleUtil.getStringFromBundle("dataset.message.editTerms.message")); + this.readOnly = false; + return; + } + if (workingVersion.isReleased()) { refreshSelectedFiles(selectedFiles); } updateFileCounts(); - refreshCategoriesByName(); + refreshCategoriesByName(); + refreshTabFileTagsByName(); + PrimeFaces.current().executeScript("PF('fileTagsPopup').show()"); } private List tabFileTagsByName; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index f211ccd0410..73b4723de53 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.license.License; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; @@ -566,6 +567,13 @@ public boolean isHasNonPackageFile(){ // The presence of any non-package file means that HTTP Upload was used (no mixing allowed) so we just check the first file. return !this.fileMetadatas.get(0).getDataFile().getContentType().equals(DataFileServiceBean.MIME_TYPE_PACKAGE_FILE); } + + public boolean isHasRestrictedFile(){ + if (this.fileMetadatas == null || this.fileMetadatas.isEmpty()){ + return false; + } + return this.fileMetadatas.stream().anyMatch(fm -> (fm.isRestricted())); + } public void updateDefaultValuesFromTemplate(Template template) { if (!template.getDatasetFields().isEmpty()) { @@ -591,10 +599,12 @@ public DatasetVersion cloneDatasetVersion(){ dsv.setDatasetFields(dsv.copyDatasetFields(this.getDatasetFields())); } - if (this.getTermsOfUseAndAccess()!= null){ - dsv.setTermsOfUseAndAccess(this.getTermsOfUseAndAccess().copyTermsOfUseAndAccess()); - } - + /* + adding file metadatas here and updating terms + because the terms need to know about the files + in a pre-save validation SEK 12/6/2021 + */ + for (FileMetadata fm : this.getFileMetadatas()) { FileMetadata newFm = new FileMetadata(); // TODO: @@ -614,6 +624,17 @@ public DatasetVersion cloneDatasetVersion(){ dsv.getFileMetadatas().add(newFm); } + + if (this.getTermsOfUseAndAccess()!= null){ + TermsOfUseAndAccess terms = this.getTermsOfUseAndAccess().copyTermsOfUseAndAccess(); + terms.setDatasetVersion(dsv); + dsv.setTermsOfUseAndAccess(terms); + } else { + TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); + terms.setDatasetVersion(dsv); + // 
terms.setLicense(TermsOfUseAndAccess.License.CC0); + dsv.setTermsOfUseAndAccess(terms); + } dsv.setDataset(this.getDataset()); return dsv; @@ -627,6 +648,7 @@ public void initDefaultValues(License license) { TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); terms.setDatasetVersion(this); terms.setLicense(license); + terms.setFileAccessRequest(true); this.setTermsOfUseAndAccess(terms); } @@ -1656,7 +1678,22 @@ public Set validate() { } } } + + + TermsOfUseAndAccess toua = this.termsOfUseAndAccess; + //Only need to test Terms of Use and Access if there are restricted files + if (toua != null && this.isHasRestrictedFile()) { + Set> constraintViolations = validator.validate(toua); + if (constraintViolations.size() > 0) { + ConstraintViolation violation = constraintViolations.iterator().next(); + String message = BundleUtil.getStringFromBundle("toua.invalid"); + logger.info(message); + this.termsOfUseAndAccess.setValidationMessage(message); + returnSet.add(violation); + } + } + return returnSet; } diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index a7fc03e216f..c4d3f51c86a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -96,11 +96,11 @@ public enum FileEditMode { EDIT, UPLOAD, CREATE, REPLACE }; - + public enum Referrer { DATASET, FILE }; - + @EJB DatasetServiceBean datasetService; @EJB @@ -131,9 +131,12 @@ public enum Referrer { IndexServiceBean indexService; @Inject DataverseRequestServiceBean dvRequestService; - @Inject PermissionsWrapper permissionsWrapper; - @Inject FileDownloadHelper fileDownloadHelper; - @Inject ProvPopupFragmentBean provPopupFragmentBean; + @Inject + PermissionsWrapper permissionsWrapper; + @Inject + FileDownloadHelper fileDownloadHelper; + @Inject + ProvPopupFragmentBean provPopupFragmentBean; @Inject SettingsWrapper settingsWrapper; @Inject @@ -142,15 
+145,15 @@ public enum Referrer { DataFileCategoryServiceBean dataFileCategoryService; private Dataset dataset = new Dataset(); - - private FileReplacePageHelper fileReplacePageHelper; + private FileReplacePageHelper fileReplacePageHelper; - private String selectedFileIdsString = null; - private FileEditMode mode; + private String selectedFileIdsString = null; + private FileEditMode mode; private Referrer referrer = Referrer.DATASET; - private List selectedFileIdsList = new ArrayList<>(); - private List fileMetadatas = new ArrayList<>();; + private List selectedFileIdsList = new ArrayList<>(); + private List fileMetadatas = new ArrayList<>(); + ; private Long ownerId; @@ -158,19 +161,18 @@ public enum Referrer { private List newFiles = new ArrayList<>(); private List uploadedFiles = new ArrayList<>(); private List uploadedInThisProcess = new ArrayList<>(); - + private DatasetVersion workingVersion; private DatasetVersion clone; private String dropBoxSelection = ""; private String displayCitation; - private boolean tabularDataTagsUpdated = false; - + private boolean tabularDataTagsUpdated = false; + private String persistentId; - + private String versionString = ""; - - - private boolean saveEnabled = false; + + private boolean saveEnabled = false; // Used to store results of permissions checks private final Map datasetPermissionMap = new HashMap<>(); // { Permission human_name : Boolean } @@ -183,9 +185,9 @@ public enum Referrer { //MutableBoolean so it can be passed from DatasetPage, supporting DatasetPage.cancelCreate() private MutableBoolean uploadInProgress = null; - + private final int NUMBER_OF_SCROLL_ROWS = 25; - + private DataFile singleFile = null; public DataFile getSingleFile() { @@ -195,29 +197,29 @@ public DataFile getSingleFile() { public void setSingleFile(DataFile singleFile) { this.singleFile = singleFile; } - + public String getSelectedFileIds() { return selectedFileIdsString; } - - public DataFile getFileToReplace(){ - if 
(!this.isFileReplaceOperation()){ + + public DataFile getFileToReplace() { + if (!this.isFileReplaceOperation()) { return null; } - if (this.fileReplacePageHelper == null){ + if (this.fileReplacePageHelper == null) { return null; } return this.fileReplacePageHelper.getFileToReplace(); } - + public void setSelectedFileIds(String selectedFileIds) { selectedFileIdsString = selectedFileIds; } - + public FileEditMode getMode() { return mode; } - + public void setMode(FileEditMode mode) { this.mode = mode; } @@ -229,27 +231,25 @@ public Referrer getReferrer() { public void setReferrer(Referrer referrer) { this.referrer = referrer; } - - - + public List getFileMetadatas() { - + // ------------------------------------- // Handle a Replace operation // - The List comes from a different source // ------------------------------------- - if (isFileReplaceOperation()){ - if (fileReplacePageHelper.wasPhase1Successful()){ + if (isFileReplaceOperation()) { + if (fileReplacePageHelper.wasPhase1Successful()) { logger.fine("Replace: File metadatas 'list' of 1 from the fileReplacePageHelper."); return fileReplacePageHelper.getNewFileMetadatasBeforeSave(); } else { logger.fine("Replace: replacement file not yet uploaded."); return null; - } + } } - + if (fileMetadatas != null) { - logger.fine("Returning a list of "+fileMetadatas.size()+" file metadatas."); + logger.fine("Returning a list of " + fileMetadatas.size() + " file metadatas."); } else { logger.fine("File metadatas list hasn't been initialized yet."); } @@ -261,14 +261,14 @@ public List getFileMetadatas() { //if (uploadInProgress) { // return null; //} - + return fileMetadatas; } - + public void setFileMetadatas(List fileMetadatas) { this.fileMetadatas = fileMetadatas; } - + /* The 2 methods below are for setting up the PrimeFaces:dataTabe component used to display the uploaded files, or the files selected for editing. 
@@ -289,48 +289,46 @@ When we have more than NUMBER_OF_SCROLL_ROWS worth of files (currently table fixed as the size of the list grows! (the "scrollRows" attribute of the p:dataTable component only applies when "liveScroll=true" is being used). - */ - + */ public boolean isScrollable() { return !(fileMetadatas == null || fileMetadatas.size() <= NUMBER_OF_SCROLL_ROWS + 1); } - + public String getScrollHeightPercentage() { - int perc; + int perc; if (fileMetadatas == null || fileMetadatas.size() < NUMBER_OF_SCROLL_ROWS) { perc = 100; } else { perc = NUMBER_OF_SCROLL_ROWS * 100 / fileMetadatas.size(); } - + if (perc == 0) { perc = 1; } else if (perc > 100) { perc = 100; } - - logger.fine("scroll height percentage: "+perc); + + logger.fine("scroll height percentage: " + perc); return perc + "%"; } - + /* Any settings, such as the upload size limits, should be saved locally - so that the db doesn't get hit repeatedly. (this setting is initialized in the init() method) This may be "null", signifying unlimited download size. - */ - + */ public Long getMaxFileUploadSizeInBytes() { return this.maxFileUploadSizeInBytes; } - + public String getHumanMaxFileUploadSizeInBytes() { return FileSizeChecker.bytesToHumanReadable(this.maxFileUploadSizeInBytes); } - + public boolean isUnlimitedUploadFileSize() { - + return this.maxFileUploadSizeInBytes == null; } @@ -366,41 +364,42 @@ public String populateHumanPerFormatTabularLimits() { The number of files the GUI user is allowed to upload in one batch, via drag-and-drop, or through the file select dialog. Now configurable in the Settings table. 
- */ + */ public Integer getMaxNumberOfFiles() { return this.multipleUploadFilesLimit; } + /** * Check Dataset related permissions - * + * * @param permissionToCheck - * @return + * @return */ - public boolean doesSessionUserHaveDataSetPermission(Permission permissionToCheck){ - if (permissionToCheck == null){ + public boolean doesSessionUserHaveDataSetPermission(Permission permissionToCheck) { + if (permissionToCheck == null) { return false; } - + String permName = permissionToCheck.getHumanName(); - + // Has this check already been done? // - if (this.datasetPermissionMap.containsKey(permName)){ + if (this.datasetPermissionMap.containsKey(permName)) { // Yes, return previous answer return this.datasetPermissionMap.get(permName); } - + // Check the permission // boolean hasPermission = this.permissionService.userOn(this.session.getUser(), this.dataset).has(permissionToCheck); // Save the permission this.datasetPermissionMap.put(permName, hasPermission); - + // return true/false return hasPermission; } - + public void reset() { // ? 
} @@ -408,7 +407,7 @@ public void reset() { public String getGlobalId() { return persistentId; } - + public String getPersistentId() { return persistentId; } @@ -479,49 +478,48 @@ public String initCreateMode(String modeToken, DatasetVersion version, MutableBo logger.fine("Request to initialize Edit Files page with null token (aborting)."); return null; } - + if (!modeToken.equals("CREATE")) { logger.fine("Request to initialize Edit Files page with token " + modeToken + " (aborting)."); - return null; + return null; } - + logger.fine("Initializing Edit Files page in CREATE mode;"); - + if (version == null) { return permissionsWrapper.notFound(); - } - - workingVersion = version; + } + + workingVersion = version; dataset = version.getDataset(); mode = FileEditMode.CREATE; - uploadInProgress= inProgress; + uploadInProgress = inProgress; newFiles = newFilesList; uploadedFiles = uploadedFilesList; selectedFiles = selectedFileMetadatasList; - + this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); - + logger.fine("done"); - + saveEnabled = true; - return null; + return null; } - public String init() { // default mode should be EDIT if (mode == null) { mode = FileEditMode.EDIT; } - + newFiles = new ArrayList<>(); uploadedFiles = new ArrayList<>(); - uploadInProgress= new MutableBoolean(false); - - if (dataset.getId() != null){ + uploadInProgress = new MutableBoolean(false); + + if (dataset.getId() != null) { // Set Working Version and Dataset by Datasaet Id and Version //retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionById(dataset.getId(), null); dataset = datasetService.find(dataset.getId()); @@ -535,16 +533,16 @@ public String init() { // that the dataset id is 
mandatory... But 404 will do for now. return permissionsWrapper.notFound(); } - + workingVersion = dataset.getEditVersion(); - + //TODO: review if we we need this check; // as getEditVersion should either return the exisiting draft or create a new one if (workingVersion == null || !workingVersion.isDraft()) { // Sorry, we couldn't find/obtain a draft version for this dataset! return permissionsWrapper.notFound(); } - + // Check if they have permission to modify this dataset: if (!permissionService.on(dataset).has(Permission.EditDataset)) { return permissionsWrapper.notAuthorized(); @@ -555,19 +553,27 @@ public String init() { this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit(); this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits(); this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit(); - + this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId()); + + hasValidTermsOfAccess = isHasValidTermsOfAccess(); + if (!hasValidTermsOfAccess) { + PrimeFaces.current().executeScript("PF('blockDatasetForm').show()"); + PrimeFaces.current().executeScript("PF('accessPopup').show()"); + JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.message.editTerms.label"), BundleUtil.getStringFromBundle("dataset.message.editTerms.message")); + return ""; + } // ------------------------------------------- // Is this a file replacement operation? 
// ------------------------------------------- - if (mode == FileEditMode.REPLACE){ + if (mode == FileEditMode.REPLACE) { /* http://localhost:8080/editdatafiles.xhtml?mode=REPLACE&datasetId=26&fid=726 - */ + */ DataFile fileToReplace = loadFileToReplace(); - if (fileToReplace == null){ + if (fileToReplace == null) { return permissionsWrapper.notFound(); } - + //DataverseRequest dvRequest2 = createDataverseRequest(authUser); AddReplaceFileHelper addReplaceFileHelper = new AddReplaceFileHelper(dvRequestService.getDataverseRequest(), ingestService, @@ -579,12 +585,12 @@ public String init() { licenseServiceBean); fileReplacePageHelper = new FileReplacePageHelper(addReplaceFileHelper, - dataset, - fileToReplace); + dataset, + fileToReplace); populateFileMetadatas(); singleFile = getFileToReplace(); - }else if (mode == FileEditMode.EDIT) { + } else if (mode == FileEditMode.EDIT) { if (selectedFileIdsString != null) { String[] ids = selectedFileIdsString.split(","); @@ -625,53 +631,52 @@ public String init() { if (fileMetadatas.size() < 1) { return permissionsWrapper.notFound(); } - - if (FileEditMode.EDIT == mode && Referrer.FILE == referrer){ - if (fileMetadatas.get(0).getDatasetVersion().getId() != null){ + + if (FileEditMode.EDIT == mode && Referrer.FILE == referrer) { + if (fileMetadatas.get(0).getDatasetVersion().getId() != null) { versionString = "DRAFT"; } - } - + } + } - - saveEnabled = true; - if (mode == FileEditMode.UPLOAD && workingVersion.getFileMetadatas().isEmpty() && rsyncUploadSupported()) { + + saveEnabled = true; + if (mode == FileEditMode.UPLOAD && workingVersion.getFileMetadatas().isEmpty() && rsyncUploadSupported()) { setUpRsync(); } if (settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, false)){ JH.addMessage(FacesMessage.SEVERITY_WARN, getBundleString("dataset.message.publicInstall")); - } - + } + return null; } - - - private void msg(String s){ + + private void msg(String s) { System.out.println(s); } - + /** * For single file 
replacement, load the file to replace - * - * @return + * + * @return */ - private DataFile loadFileToReplace(){ - - Map params =FacesContext.getCurrentInstance(). - getExternalContext().getRequestParameterMap(); - - if (params.containsKey("fid")){ + private DataFile loadFileToReplace() { + + Map params = FacesContext.getCurrentInstance(). + getExternalContext().getRequestParameterMap(); + + if (params.containsKey("fid")) { String fid = params.get("fid"); - if ((!fid.isEmpty()) && (StringUtils.isNumeric(fid))){ + if ((!fid.isEmpty()) && (StringUtils.isNumeric(fid))) { selectedFileIdsList.add(Long.parseLong(fid)); return datafileService.find(Long.parseLong(fid)); } } return null; - + } // loadFileToReplace - + private List selectedFiles; // = new ArrayList<>(); public List getSelectedFiles() { @@ -681,7 +686,7 @@ public List getSelectedFiles() { public void setSelectedFiles(List selectedFiles) { this.selectedFiles = selectedFiles; } - + private boolean selectAllFiles; public boolean isSelectAllFiles() { @@ -691,7 +696,7 @@ public boolean isSelectAllFiles() { public void setSelectAllFiles(boolean selectAllFiles) { this.selectAllFiles = selectAllFiles; } - + public String getVersionString() { return versionString; } @@ -699,16 +704,16 @@ public String getVersionString() { public void setVersionString(String versionString) { this.versionString = versionString; } - - public void restrictFiles(boolean restricted) throws UnsupportedOperationException{ - + + public void restrictFiles(boolean restricted) throws UnsupportedOperationException { + if (restricted) { // get values from access popup workingVersion.getTermsOfUseAndAccess().setTermsOfAccess(termsOfAccess); workingVersion.getTermsOfUseAndAccess().setFileAccessRequest(fileAccessRequest); } - + String fileNames = null; - + for (FileMetadata fmd : this.getSelectedFiles()) { if (restricted && !fmd.isRestricted()) { // collect the names of the newly-restrticted files, @@ -720,22 +725,22 @@ public void 
restrictFiles(boolean restricted) throws UnsupportedOperationExcepti } } fmd.setRestricted(restricted); - + if (workingVersion.isDraft() && !fmd.getDataFile().isReleased()) { // We do not really need to check that the working version is // a draft here - it must be a draft, if we've gotten this // far. But just in case. -- L.A. 4.2.1 - fmd.getDataFile().setRestricted(restricted); + fmd.getDataFile().setRestricted(restricted); } } if (fileNames != null) { String successMessage = getBundleString("file.restricted.success"); logger.fine(successMessage); successMessage = successMessage.replace("{0}", fileNames); - JsfHelper.addFlashMessage(successMessage); + JsfHelper.addSuccessMessage(successMessage); } - } - + } + public int getRestrictedFileCount() { int restrictedFileCount = 0; for (FileMetadata fmd : workingVersion.getFileMetadatas()) { @@ -749,57 +754,89 @@ public int getRestrictedFileCount() { private List filesToBeDeleted = new ArrayList<>(); - - public void deleteReplacementFile() throws FileReplaceException{ - if (!isFileReplaceOperation()){ - throw new FileReplaceException("Only use this for File Replace Operations"); + public void deleteReplacementFile() throws FileReplaceException { + if (!isFileReplaceOperation()) { + throw new FileReplaceException("Only use this for File Replace Operations"); } - if (!fileReplacePageHelper.wasPhase1Successful()){ - throw new FileReplaceException("Should only be called if Phase 1 was successful"); + if (!fileReplacePageHelper.wasPhase1Successful()) { + throw new FileReplaceException("Should only be called if Phase 1 was successful"); } - - fileReplacePageHelper.resetReplaceFileHelper(); + fileReplacePageHelper.resetReplaceFileHelper(); - String successMessage = getBundleString("file.deleted.replacement.success"); logger.fine(successMessage); JsfHelper.addFlashMessage(successMessage); - + + } + + private Boolean hasValidTermsOfAccess = null; + + public Boolean isHasValidTermsOfAccess() { + //cache in page to limit 
processing + if (hasValidTermsOfAccess != null) { + return hasValidTermsOfAccess; + } else { + if (!isHasRestrictedFiles()) { + hasValidTermsOfAccess = true; + return hasValidTermsOfAccess; + } else { + hasValidTermsOfAccess = TermsOfUseAndAccessValidator.isTOUAValid(workingVersion.getTermsOfUseAndAccess(), null); + return hasValidTermsOfAccess; + } + } } + public boolean getHasValidTermsOfAccess(){ + return isHasValidTermsOfAccess(); //HasValidTermsOfAccess + } + public void setHasValidTermsOfAccess(boolean value){ + //dummy for ui + } + + private Boolean hasRestrictedFiles = null; + + public Boolean isHasRestrictedFiles() { + //cache in page to limit processing + if (hasRestrictedFiles != null) { + return hasRestrictedFiles; + } else { + hasRestrictedFiles = workingVersion.isHasRestrictedFile(); + return hasRestrictedFiles; + } + } + /** - * + * * @param msgName - from the bundle e.g. "file.deleted.success" - * @return + * @return */ - private String getBundleString(String msgName){ - - return BundleUtil.getStringFromBundle(msgName); + private String getBundleString(String msgName) { + + return BundleUtil.getStringFromBundle(msgName); } - + // This deleteFilesCompleted method is used in editFilesFragment.xhtml - public void deleteFilesCompleted(){ - + public void deleteFilesCompleted() { + } - - public void deleteFiles(){ + + public void deleteFiles() { deleteFiles(this.selectedFiles); } - - public void deleteDuplicateFiles(){ + + public void deleteDuplicateFiles() { List filesForDelete = new ArrayList(); - for(DataFile df : newFiles ){ - if (df.isMarkedAsDuplicate()){ - filesForDelete.add(df.getFileMetadata()); - } + for (DataFile df : newFiles) { + if (df.isMarkedAsDuplicate()) { + filesForDelete.add(df.getFileMetadata()); } + } deleteFiles(filesForDelete); } - - + private void deleteFiles(List filesForDelete) { logger.fine("entering bulk file delete (EditDataFilesPage)"); if (isFileReplaceOperation()) { @@ -816,8 +853,7 @@ private void deleteFiles(List 
filesForDelete) { with a duplicate files delete situation so we are adding the marked as dup files as selected and moving on accordingly. - */ - + */ String fileNames = null; for (FileMetadata fmd : filesForDelete) { // collect the names of the files, @@ -881,8 +917,9 @@ private void deleteFiles(List filesForDelete) { // Also remove checksum from the list of newly uploaded checksums (perhaps odd // to delete and then try uploading the same file again, but it seems like it // should be allowed/the checksum list is part of the state to clean-up - if(checksumMapNew != null && markedForDelete.getDataFile().getChecksumValue() != null) + if (checksumMapNew != null && markedForDelete.getDataFile().getChecksumValue() != null) { checksumMapNew.remove(markedForDelete.getDataFile().getChecksumValue()); + } } } @@ -904,19 +941,17 @@ private void deleteFiles(List filesForDelete) { } } - - - /** * Save for File Replace operations + * * @return - * @throws FileReplaceException + * @throws FileReplaceException */ - public String saveReplacementFile() throws FileReplaceException{ - + public String saveReplacementFile() throws FileReplaceException { + // Ahh, make sure it's a file replace operation // - if (!isFileReplaceOperation()){ + if (!isFileReplaceOperation()) { throw new FileReplaceException("Only use this for File Replace Operations"); } @@ -928,20 +963,20 @@ public String saveReplacementFile() throws FileReplaceException{ } // Sanity check 1 // - if (fileReplacePageHelper == null){ + if (fileReplacePageHelper == null) { throw new NullPointerException("fileReplacePageHelper cannot be null"); } - + // Make sure phase 1 ran -- button shouldn't be visible if it did not // - if (!fileReplacePageHelper.wasPhase1Successful()){ + if (!fileReplacePageHelper.wasPhase1Successful()) { throw new FileReplaceException("Save should only be called when a replacement file has been chosen. (Phase 1 has to have completed)"); - + } // Run save!! 
// - if (fileReplacePageHelper.runSaveReplacementFile_Phase2()){ + if (fileReplacePageHelper.runSaveReplacementFile_Phase2()) { JsfHelper.addSuccessMessage(getBundleString("file.message.replaceSuccess")); // It worked!!! Go to page of new file!! if (Referrer.FILE == referrer) { @@ -949,19 +984,19 @@ public String saveReplacementFile() throws FileReplaceException{ } else { return returnToDraftVersion(); } - }else{ + } else { // Uh oh. String errMsg = fileReplacePageHelper.getErrorMessages(); - + FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, getBundleString("dataset.save.fail"), errMsg)); logger.severe("Dataset save failed for replace operation: " + errMsg); return null; } - - } - + + } + public String save() { - + Collection duplicates = IngestUtil.findDuplicateFilenames(workingVersion, newFiles); if (!duplicates.isEmpty()) { JH.addMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("dataset.message.filesFailure"), BundleUtil.getStringFromBundle("dataset.message.editMetadata.duplicateFilenames", new ArrayList<>(duplicates))); @@ -977,12 +1012,12 @@ public String save() { if (dataset.isLockedFor(DatasetLock.Reason.EditInProgress) || lockTest.isLockedFor(DatasetLock.Reason.EditInProgress)) { logger.log(Level.INFO, "Couldn''t save dataset: {0}", "It is locked." 
+ ""); - JH.addMessage(FacesMessage.SEVERITY_FATAL, getBundleString("dataset.locked.editInProgress.message"),BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null)))); + JH.addMessage(FacesMessage.SEVERITY_FATAL, getBundleString("dataset.locked.editInProgress.message"), BundleUtil.getStringFromBundle("dataset.locked.editInProgress.message.details", Arrays.asList(BrandingUtil.getSupportTeamName(null)))); return null; } } - if (isFileReplaceOperation()){ + if (isFileReplaceOperation()) { try { return saveReplacementFile(); } catch (FileReplaceException ex) { @@ -990,14 +1025,13 @@ public String save() { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, getBundleString("dataset.save.fail"), errMsg)); logger.log(Level.SEVERE, "Dataset save failed for replace operation: {0}", errMsg); return null; - } + } } - int nOldFiles = workingVersion.getFileMetadatas().size(); int nNewFiles = newFiles.size(); - int nExpectedFilesTotal = nOldFiles + nNewFiles; - + int nExpectedFilesTotal = nOldFiles + nNewFiles; + if (nNewFiles > 0) { //SEK 10/15/2018 only apply the following tests if dataset has already been saved. 
if (dataset.getId() != null) { @@ -1019,7 +1053,7 @@ public String save() { } } } - + // Try to save the NEW files permanently: List filesAdded = ingestService.saveAndAddFilesToDataset(workingVersion, newFiles, null, true); @@ -1029,13 +1063,13 @@ public String save() { for (DataFile addedFile : filesAdded) { fileMetadatas.add(addedFile.getFileMetadata()); } - filesAdded = null; + filesAdded = null; } //boolean newDraftVersion = false; Boolean provJsonChanges = false; - - if(systemConfig.isProvCollectionEnabled()) { + + if (systemConfig.isProvCollectionEnabled()) { Boolean provFreeChanges = provPopupFragmentBean.updatePageMetadatasWithProvFreeform(fileMetadatas); try { @@ -1092,7 +1126,6 @@ public String save() { // to update the tags in the database (issue #2798). // TODO: Is the above still true/is this still necessary? // (and why?...) - if (tabularDataTagsUpdated) { for (int i = 0; i < dataset.getFiles().size(); i++) { for (FileMetadata fileMetadata : fileMetadatas) { @@ -1160,15 +1193,15 @@ public String save() { logger.info("starting async job for obtaining persistent ids for files."); datasetService.obtainPersistentIdentifiersForDatafiles(dataset); - */ + */ } - + workingVersion = dataset.getEditVersion(); - logger.fine("working version id: "+workingVersion.getId()); - - if (FileEditMode.EDIT == mode && Referrer.FILE == referrer){ + logger.fine("working version id: " + workingVersion.getId()); + + if (FileEditMode.EDIT == mode && Referrer.FILE == referrer) { JsfHelper.addSuccessMessage(getBundleString("file.message.editSuccess")); - + } else { int nFilesTotal = workingVersion.getFileMetadatas().size(); if (nNewFiles == 0 || nFilesTotal == nExpectedFilesTotal) { @@ -1197,50 +1230,47 @@ public String save() { versionString = "DRAFT"; return returnToFileLandingPage(); } - + logger.fine("Redirecting to the dataset page, from the edit/upload page."); return returnToDraftVersion(); } - - public boolean canPublishDataset(){ + + public boolean canPublishDataset() { 
return permissionsWrapper.canIssuePublishDatasetCommand(dataset); } - - private void populateDatasetUpdateFailureMessage(){ - + + private void populateDatasetUpdateFailureMessage() { + JH.addMessage(FacesMessage.SEVERITY_FATAL, getBundleString("dataset.message.filesFailure")); } - - - - private String returnToDraftVersion(){ - return "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&version=DRAFT&faces-redirect=true"; + + private String returnToDraftVersion() { + return "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&version=DRAFT&faces-redirect=true"; } - - private String returnToDatasetOnly(){ - dataset = datasetService.find(dataset.getId()); - return "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&faces-redirect=true"; + + private String returnToDatasetOnly() { + dataset = datasetService.find(dataset.getId()); + return "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&faces-redirect=true"; } - + private String returnToFileLandingPage() { - Long fileId = fileMetadatas.get(0).getDataFile().getId(); - if (versionString != null && versionString.equals("DRAFT")){ - return "/file.xhtml?fileId=" + fileId + "&version=DRAFT&faces-redirect=true"; + Long fileId = fileMetadatas.get(0).getDataFile().getId(); + if (versionString != null && versionString.equals("DRAFT")) { + return "/file.xhtml?fileId=" + fileId + "&version=DRAFT&faces-redirect=true"; } - return "/file.xhtml?fileId=" + fileId + "&faces-redirect=true"; + return "/file.xhtml?fileId=" + fileId + "&faces-redirect=true"; } private String returnToFileLandingPageAfterReplace(DataFile newFile) { - - if (newFile == null){ + + if (newFile == null) { throw new NullPointerException("newFile cannot be null!"); } //Long datasetVersionId = newFile.getOwner().getLatestVersion().getId(); - return "/file.xhtml?fileId=" + newFile.getId() + "&version=DRAFT&faces-redirect=true"; + return "/file.xhtml?fileId=" + newFile.getId() + 
"&version=DRAFT&faces-redirect=true"; } - public String cancel() { uploadInProgress.setValue(false); //Files that have been finished and are now in the lower list on the page @@ -1262,7 +1292,6 @@ public String cancel() { return returnToDatasetOnly(); } - /* deprecated; super inefficient, when called repeatedly on a long list of files! leaving the code here, commented out, for illustration purposes. -- 4.6 @@ -1297,7 +1326,6 @@ public boolean isDuplicate(FileMetadata fileMetadata) { return MD5Map.get(thisMd5) != null && MD5Map.get(thisMd5).intValue() > 1; }*/ - private HttpClient getClient() { // TODO: // cache the http client? -- L.A. 4.0 alpha @@ -1306,50 +1334,48 @@ private HttpClient getClient() { /** * Is this page in File Replace mode - * - * @return + * + * @return */ - public boolean isFileReplaceOperation(){ - return (mode == FileEditMode.REPLACE)&&(fileReplacePageHelper!= null); + public boolean isFileReplaceOperation() { + return (mode == FileEditMode.REPLACE) && (fileReplacePageHelper != null); } - - public boolean allowMultipleFileUpload(){ - + + public boolean allowMultipleFileUpload() { + return !isFileReplaceOperation(); } - - public boolean showFileUploadFragment(){ + + public boolean showFileUploadFragment() { return mode == FileEditMode.UPLOAD || mode == FileEditMode.CREATE || mode == FileEditMode.REPLACE; } - - - public boolean showFileUploadComponent(){ + + public boolean showFileUploadComponent() { if (mode == FileEditMode.UPLOAD || mode == FileEditMode.CREATE) { - return true; + return true; } - - if (isFileReplaceOperation()){ + + if (isFileReplaceOperation()) { //msg("fileReplacePageHelper.showFileUploadComponent(): "+ fileReplacePageHelper.showFileUploadComponent()); return fileReplacePageHelper.showFileUploadComponent(); - } + } return false; //return false; } - /** * Download a file from drop box - * + * * @param fileLink - * @return + * @return */ - private InputStream getDropBoxInputStream(String fileLink, GetMethod dropBoxMethod){ - - 
if (fileLink == null){ + private InputStream getDropBoxInputStream(String fileLink, GetMethod dropBoxMethod) { + + if (fileLink == null) { return null; } - + // ----------------------------------------------------------- // Make http call, download the file: // ----------------------------------------------------------- @@ -1364,17 +1390,16 @@ private InputStream getDropBoxInputStream(String fileLink, GetMethod dropBoxMeth } catch (IOException ex) { logger.log(Level.WARNING, "Failed to access DropBox url: {0}!", fileLink); return null; - } + } logger.log(Level.WARNING, "Failed to get DropBox InputStream for file: {0}", fileLink); return null; } // end: getDropBoxInputStream - - + /** * Using information from the DropBox choose, ingest the chosen files - * https://www.dropbox.com/developers/dropins/chooser/js - * + * https://www.dropbox.com/developers/dropins/chooser/js + * * @param event */ public void handleDropBoxUpload(ActionEvent event) { @@ -1383,7 +1408,7 @@ public void handleDropBoxUpload(ActionEvent event) { } logger.fine("handleDropBoxUpload"); uploadComponentId = event.getComponent().getClientId(); - + // ----------------------------------------------------------- // Read JSON object from the output of the DropBox Chooser: // ----------------------------------------------------------- @@ -1396,7 +1421,7 @@ public void handleDropBoxUpload(ActionEvent event) { // ----------------------------------------------------------- DataFile dFile = null; GetMethod dropBoxMethod = null; - String localWarningMessage = null; + String localWarningMessage = null; for (int i = 0; i < dbArray.size(); i++) { JsonObject dbObject = dbArray.getJsonObject(i); @@ -1427,7 +1452,6 @@ public void handleDropBoxUpload(ActionEvent event) { continue; // skip to next file, and add error mesage } - dFile = null; dropBoxMethod = new GetMethod(fileLink); @@ -1435,24 +1459,23 @@ public void handleDropBoxUpload(ActionEvent event) { // Download the file // 
----------------------------------------------------------- InputStream dropBoxStream = this.getDropBoxInputStream(fileLink, dropBoxMethod); - if (dropBoxStream==null){ + if (dropBoxStream == null) { logger.severe("Could not retrieve dropgox input stream for: " + fileLink); continue; // Error skip this file } - + // ----------------------------------------------------------- // Is this a FileReplaceOperation? If so, then diverge! // ----------------------------------------------------------- - if (this.isFileReplaceOperation()){ - this.handleReplaceFileUpload(event, dropBoxStream, fileName, FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT, null, event); - this.setFileMetadataSelectedForTagsPopup(fileReplacePageHelper.getNewFileMetadatasBeforeSave().get(0)); - return; - } + if (this.isFileReplaceOperation()) { + this.handleReplaceFileUpload(event, dropBoxStream, fileName, FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT, null, event); + this.setFileMetadataSelectedForTagsPopup(fileReplacePageHelper.getNewFileMetadatasBeforeSave().get(0)); + return; + } // ----------------------------------------------------------- - - List datafiles = new ArrayList<>(); - + List datafiles = new ArrayList<>(); + // ----------------------------------------------------------- // Send it through the ingest service // ----------------------------------------------------------- @@ -1462,8 +1485,8 @@ public void handleDropBoxUpload(ActionEvent event) { // for example, multiple files can be extracted from an uncompressed // zip file. 
//datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); - datafiles = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null,null, systemConfig); - + datafiles = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig); + } catch (IOException ex) { this.logger.log(Level.SEVERE, "Error during ingest of DropBox file {0} from link {1}", new Object[]{fileName, fileLink}); continue; @@ -1474,11 +1497,11 @@ public void handleDropBoxUpload(ActionEvent event) { // ----------------------------------------------------------- // release connection for dropBoxMethod // ----------------------------------------------------------- - + if (dropBoxMethod != null) { dropBoxMethod.releaseConnection(); } - + // ----------------------------------------------------------- // close the dropBoxStream // ----------------------------------------------------------- @@ -1488,11 +1511,11 @@ public void handleDropBoxUpload(ActionEvent event) { logger.log(Level.WARNING, "Failed to close the dropBoxStream for file: {0}", fileLink); } } - - if (datafiles == null){ + + if (datafiles == null) { this.logger.log(Level.SEVERE, "Failed to create DataFile for DropBox file {0} from link {1}", new Object[]{fileName, fileLink}); continue; - }else{ + } else { // ----------------------------------------------------------- // Check if there are duplicate files or ingest warnings // ----------------------------------------------------------- @@ -1508,14 +1531,14 @@ public void handleDropBoxUpload(ActionEvent event) { } }*/ } - if(uploadInProgress.isFalse()) { + if (uploadInProgress.isFalse()) { logger.warning("Upload in progress cancelled"); for (DataFile newFile : datafiles) { FileUtil.deleteTempFile(newFile, dataset, ingestService); } } } - + if (localWarningMessage != null) { if (uploadWarningMessage == null) { uploadWarningMessage = 
localWarningMessage; @@ -1524,18 +1547,18 @@ public void handleDropBoxUpload(ActionEvent event) { } } } - + public void uploadStarted() { // uploadStarted() is triggered by PrimeFaces s3io = FileUtil.getS3AccessForDirectUpload(dataset); - if(s3io == null) { - FacesContext.getCurrentInstance().addMessage(uploadComponentId, new FacesMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("dataset.file.uploadWarning"), "Direct upload not supported for this dataset")); + if (s3io == null) { + FacesContext.getCurrentInstance().addMessage(uploadComponentId, new FacesMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("dataset.file.uploadWarning"), "Direct upload not supported for this dataset")); } String url = null; String storageIdentifier = null; try { - url = s3io.generateTemporaryS3UploadUrl(); - storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); + url = s3io.generateTemporaryS3UploadUrl(); + storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); + } catch (IOException io) { + logger.warning(io.getMessage()); + FacesContext.getCurrentInstance().addMessage(uploadComponentId, new FacesMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("dataset.file.uploadWarning"), "Issue in connecting to S3 store for direct upload")); + } + + PrimeFaces.current().executeScript("uploadFileDirectly('" + url + "','" + storageIdentifier + "')"); + } + + public void requestDirectUploadUrls() { + + Map paramMap = FacesContext.getCurrentInstance().getExternalContext().getRequestParameterMap(); + + String sizeString = paramMap.get("fileSize"); + long fileSize = Long.parseLong(sizeString); + + S3AccessIO s3io = FileUtil.getS3AccessForDirectUpload(dataset); + if (s3io == null) { + FacesContext.getCurrentInstance().addMessage(uploadComponentId, + new FacesMessage(FacesMessage.SEVERITY_ERROR, + BundleUtil.getStringFromBundle("dataset.file.uploadWarning"), + "Direct upload not 
supported for this dataset")); + } + JsonObjectBuilder urls = null; + String storageIdentifier = null; + try { + storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); + urls = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize); + } catch (IOException io) { - logger.warning(io.getMessage()); - FacesContext.getCurrentInstance().addMessage(uploadComponentId, new FacesMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("dataset.file.uploadWarning"), "Issue in connecting to S3 store for direct upload")); - } + logger.warning(io.getMessage()); + FacesContext.getCurrentInstance().addMessage(uploadComponentId, + new FacesMessage(FacesMessage.SEVERITY_ERROR, + BundleUtil.getStringFromBundle("dataset.file.uploadWarning"), + "Issue in connecting to S3 store for direct upload")); + } - PrimeFaces.current().executeScript("uploadFileDirectly('" + url + "','" + storageIdentifier + "')"); + PrimeFaces.current().executeScript( + "uploadFileDirectly('" + urls.build().toString() + "','" + storageIdentifier + "','" + fileSize + "')"); } - - public void requestDirectUploadUrls() { - - Map paramMap = FacesContext.getCurrentInstance().getExternalContext().getRequestParameterMap(); - - String sizeString = paramMap.get("fileSize"); - long fileSize = Long.parseLong(sizeString); - - S3AccessIO s3io = FileUtil.getS3AccessForDirectUpload(dataset); - if (s3io == null) { - FacesContext.getCurrentInstance().addMessage(uploadComponentId, - new FacesMessage(FacesMessage.SEVERITY_ERROR, - BundleUtil.getStringFromBundle("dataset.file.uploadWarning"), - "Direct upload not supported for this dataset")); - } - JsonObjectBuilder urls = null; - String storageIdentifier = null; - try { - storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); - urls = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize); - - } catch (IOException 
io) { - logger.warning(io.getMessage()); - FacesContext.getCurrentInstance().addMessage(uploadComponentId, - new FacesMessage(FacesMessage.SEVERITY_ERROR, - BundleUtil.getStringFromBundle("dataset.file.uploadWarning"), - "Issue in connecting to S3 store for direct upload")); - } - - PrimeFaces.current().executeScript( - "uploadFileDirectly('" + urls.build().toString() + "','" + storageIdentifier + "','" + fileSize + "')"); - } public void uploadFinished() { // This method is triggered from the page, by the { df.setMarkedAsDuplicate(false); }); - - if (event == null){ + + if (event == null) { throw new NullPointerException("event cannot be null"); } - + UploadedFile uFile = event.getFile(); - if (uFile == null){ + if (uFile == null) { throw new NullPointerException("uFile cannot be null"); } - /** * For File Replace, take a different code path */ - if (isFileReplaceOperation()){ + if (isFileReplaceOperation()) { handleReplaceFileUpload(event, uFile.getInputStream(), - uFile.getFileName(), - uFile.getContentType(), - event, - null); - if( fileReplacePageHelper.wasPhase1Successful() && fileReplacePageHelper.hasContentTypeWarning()){ - //RequestContext context = RequestContext.getCurrentInstance(); - //RequestContext.getCurrentInstance().update("datasetForm:fileTypeDifferentPopup"); - //context.execute("PF('fileTypeDifferentPopup').show();"); - PrimeFaces.current().ajax().update("datasetForm:fileTypeDifferentPopup"); - PrimeFaces.current().executeScript("PF('fileTypeDifferentPopup').show();"); + uFile.getFileName(), + uFile.getContentType(), + event, + null); + if (fileReplacePageHelper.wasPhase1Successful() && fileReplacePageHelper.hasContentTypeWarning()) { + //RequestContext context = RequestContext.getCurrentInstance(); + //RequestContext.getCurrentInstance().update("datasetForm:fileTypeDifferentPopup"); + //context.execute("PF('fileTypeDifferentPopup').show();"); + PrimeFaces.current().ajax().update("datasetForm:fileTypeDifferentPopup"); + 
PrimeFaces.current().executeScript("PF('fileTypeDifferentPopup').show();"); } /* @@ -1983,24 +1995,24 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { PrimeFaces.current().ajax().update("datasetForm:fileTypeDifferentPopup"); PrimeFaces.current().executeScript("PF('fileTypeDifferentPopup').show();"); } - */ + */ return; - + } - List dFileList = null; - + try { // Note: A single uploaded file may produce multiple datafiles - // for example, multiple files can be extracted from an uncompressed // zip file. dFileList = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig); - + } catch (IOException ioex) { logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ioex.getMessage()); return; - } /*catch (FileExceedsMaxSizeException ex) { + } + /*catch (FileExceedsMaxSizeException ex) { logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ex.getMessage()); return; }*/ @@ -2010,16 +2022,16 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { // already in the dataset/already uploaded, and to correct duplicate file names, etc. 
// ----------------------------------------------------------- String warningMessage = processUploadedFileList(dFileList); - - if (warningMessage != null){ + + if (warningMessage != null) { uploadWarningMessage = warningMessage; FacesContext.getCurrentInstance().addMessage(event.getComponent().getClientId(), new FacesMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("dataset.file.uploadWarning"), warningMessage)); // save the component id of the p:upload widget, so that we could // send an info message there, from elsewhere in the code: uploadComponentId = event.getComponent().getClientId(); } - - if(uploadInProgress.isFalse()) { + + if (uploadInProgress.isFalse()) { logger.warning("Upload in progress cancelled"); for (DataFile newFile : dFileList) { FileUtil.deleteTempFile(newFile, dataset, ingestService); @@ -2029,14 +2041,14 @@ public void handleFileUpload(FileUploadEvent event) throws IOException { /** * Using information from the DropBox choose, ingest the chosen files - * https://www.dropbox.com/developers/dropins/chooser/js - * + * https://www.dropbox.com/developers/dropins/chooser/js + * * @param event */ public void handleExternalUpload() { - Map paramMap = FacesContext.getCurrentInstance().getExternalContext().getRequestParameterMap(); - - this.uploadComponentId = paramMap.get("uploadComponentId"); + Map paramMap = FacesContext.getCurrentInstance().getExternalContext().getRequestParameterMap(); + + this.uploadComponentId = paramMap.get("uploadComponentId"); String fullStorageIdentifier = paramMap.get("fullStorageIdentifier"); String fileName = paramMap.get("fileName"); String contentType = paramMap.get("contentType"); @@ -2047,106 +2059,104 @@ public void handleExternalUpload() { checksumType = ChecksumType.fromString(checksumTypeString); } int lastColon = fullStorageIdentifier.lastIndexOf(':'); - String storageLocation= fullStorageIdentifier.substring(0,lastColon) + "/" + dataset.getAuthorityForFileStorage() + "/" + 
dataset.getIdentifierForFileStorage() + "/" + fullStorageIdentifier.substring(lastColon+1); - if (uploadInProgress.isFalse()) { - uploadInProgress.setValue(true); - } - logger.fine("handleExternalUpload"); - - StorageIO sio; - String localWarningMessage = null; - try { - sio = DataAccess.getDirectStorageIO(storageLocation); - - //Populate metadata - sio.open(DataAccessOption.READ_ACCESS); - //get file size - long fileSize = sio.getSize(); - - if(StringUtils.isEmpty(contentType)) { - contentType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; - } - - /* ---------------------------- + String storageLocation = fullStorageIdentifier.substring(0, lastColon) + "/" + dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage() + "/" + fullStorageIdentifier.substring(lastColon + 1); + if (uploadInProgress.isFalse()) { + uploadInProgress.setValue(true); + } + logger.fine("handleExternalUpload"); + + StorageIO sio; + String localWarningMessage = null; + try { + sio = DataAccess.getDirectStorageIO(storageLocation); + + //Populate metadata + sio.open(DataAccessOption.READ_ACCESS); + //get file size + long fileSize = sio.getSize(); + + if (StringUtils.isEmpty(contentType)) { + contentType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; + } + + /* ---------------------------- Check file size - Max size NOT specified in db: default is unlimited - Max size specified in db: check too make sure file is within limits // ---------------------------- */ - if ((!this.isUnlimitedUploadFileSize()) && (fileSize > this.getMaxFileUploadSizeInBytes())) { - String warningMessage = "Uploaded file \"" + fileName + "\" exceeded the limit of " + fileSize + " bytes and was not uploaded."; - sio.delete(); - localWarningMessage = warningMessage; - } else { - // ----------------------------------------------------------- - // Is this a FileReplaceOperation? If so, then diverge! 
- // ----------------------------------------------------------- - if (this.isFileReplaceOperation()){ - this.handleReplaceFileUpload(storageLocation, fileName, contentType, checksumValue, checksumType); - if (fileReplacePageHelper.getNewFileMetadatasBeforeSave() != null){ - this.setFileMetadataSelectedForTagsPopup(fileReplacePageHelper.getNewFileMetadatasBeforeSave().get(0)); - } - return; - } - // ----------------------------------------------------------- - List datafiles = new ArrayList<>(); - - // ----------------------------------------------------------- - // Send it through the ingest service - // ----------------------------------------------------------- - try { - - // Note: A single uploaded file may produce multiple datafiles - - // for example, multiple files can be extracted from an uncompressed - // zip file. - //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); - - - datafiles = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error during ingest of file {0}", new Object[]{fileName}); - } - - if (datafiles == null){ - logger.log(Level.SEVERE, "Failed to create DataFile for file {0}", new Object[]{fileName}); - }else{ - // ----------------------------------------------------------- - // Check if there are duplicate files or ingest warnings - // ----------------------------------------------------------- - uploadWarningMessage = processUploadedFileList(datafiles); - } - if(uploadInProgress.isFalse()) { - logger.warning("Upload in progress cancelled"); - for (DataFile newFile : datafiles) { - FileUtil.deleteTempFile(newFile, dataset, ingestService); - } - } - } - } catch (IOException e) { - logger.log(Level.SEVERE, "Failed to create DataFile for file {0}: {1}", new Object[]{fileName, e.getMessage()}); - } - if (localWarningMessage != null) { - if 
(uploadWarningMessage == null) { - uploadWarningMessage = localWarningMessage; - } else { - uploadWarningMessage = localWarningMessage.concat("; " + uploadWarningMessage); - } - } + if ((!this.isUnlimitedUploadFileSize()) && (fileSize > this.getMaxFileUploadSizeInBytes())) { + String warningMessage = "Uploaded file \"" + fileName + "\" exceeded the limit of " + fileSize + " bytes and was not uploaded."; + sio.delete(); + localWarningMessage = warningMessage; + } else { + // ----------------------------------------------------------- + // Is this a FileReplaceOperation? If so, then diverge! + // ----------------------------------------------------------- + if (this.isFileReplaceOperation()) { + this.handleReplaceFileUpload(storageLocation, fileName, contentType, checksumValue, checksumType); + if (fileReplacePageHelper.getNewFileMetadatasBeforeSave() != null) { + this.setFileMetadataSelectedForTagsPopup(fileReplacePageHelper.getNewFileMetadatasBeforeSave().get(0)); + } + return; + } + // ----------------------------------------------------------- + List datafiles = new ArrayList<>(); + + // ----------------------------------------------------------- + // Send it through the ingest service + // ----------------------------------------------------------- + try { + + // Note: A single uploaded file may produce multiple datafiles - + // for example, multiple files can be extracted from an uncompressed + // zip file. 
+ //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream"); + datafiles = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig); + } catch (IOException ex) { + logger.log(Level.SEVERE, "Error during ingest of file {0}", new Object[]{fileName}); + } + + if (datafiles == null) { + logger.log(Level.SEVERE, "Failed to create DataFile for file {0}", new Object[]{fileName}); + } else { + // ----------------------------------------------------------- + // Check if there are duplicate files or ingest warnings + // ----------------------------------------------------------- + uploadWarningMessage = processUploadedFileList(datafiles); + } + if (uploadInProgress.isFalse()) { + logger.warning("Upload in progress cancelled"); + for (DataFile newFile : datafiles) { + FileUtil.deleteTempFile(newFile, dataset, ingestService); + } + } + } + } catch (IOException e) { + logger.log(Level.SEVERE, "Failed to create DataFile for file {0}: {1}", new Object[]{fileName, e.getMessage()}); + } + if (localWarningMessage != null) { + if (uploadWarningMessage == null) { + uploadWarningMessage = localWarningMessage; + } else { + uploadWarningMessage = localWarningMessage.concat("; " + uploadWarningMessage); + } + } } - + /** - * After uploading via the site or Dropbox, - * check the list of DataFile objects - * @param dFileList + * After uploading via the site or Dropbox, check the list of DataFile + * objects + * + * @param dFileList */ - - private String existingFilesWithDupeContent = null; + private String existingFilesWithDupeContent = null; private String uploadedFilesWithDupeContentToExisting = null; private String uploadedFilesWithDupeContentToNewlyUploaded = null; private String newlyUploadedFilesWithDupeContent = null; - + private boolean multipleDupesExisting = false; private boolean multipleDupesNew = false; - + public String 
getExistingFilesWithDupeContent() { return existingFilesWithDupeContent; } @@ -2179,7 +2189,6 @@ public void setNewlyUploadedFilesWithDupeContent(String newlyUploadedFilesWithDu this.newlyUploadedFilesWithDupeContent = newlyUploadedFilesWithDupeContent; } - public boolean isMultipleDupesExisting() { return multipleDupesExisting; } @@ -2200,7 +2209,7 @@ private String processUploadedFileList(List dFileList) { if (dFileList == null) { return null; } - + uploadedInThisProcess = new ArrayList(); DataFile dataFile; @@ -2208,7 +2217,6 @@ private String processUploadedFileList(List dFileList) { // NOTE: for native file uploads, the dFileList will only // contain 1 file--method is called for every file even if the UI shows "simultaneous uploads" - // ----------------------------------------------------------- // Iterate through list of DataFile objects // ----------------------------------------------------------- @@ -2233,10 +2241,9 @@ private String processUploadedFileList(List dFileList) { // or if another file with the same checksum has already been // uploaded. // ----------------------------------------------------------- - if (isFileAlreadyInDataset(dataFile)) { DataFile existingFile = fileAlreadyExists.get(dataFile); - + // String alreadyExists = dataFile.getFileMetadata().getLabel() + " at " + existingFile.getDirectoryLabel() != null ? 
existingFile.getDirectoryLabel() + "/" + existingFile.getDisplayName() : existingFile.getDisplayName(); String uploadedDuplicateFileName = dataFile.getFileMetadata().getLabel(); String existingFileName = existingFile.getDisplayName(); @@ -2259,7 +2266,7 @@ private String processUploadedFileList(List dFileList) { } else if (isFileAlreadyUploaded(dataFile)) { DataFile existingFile = checksumMapNew.get(dataFile.getChecksumValue()); - String alreadyUploadedWithSame = existingFile.getDisplayName(); + String alreadyUploadedWithSame = existingFile.getDisplayName(); String newlyUploadedDupe = dataFile.getFileMetadata().getLabel(); if (newlyUploadedFilesWithDupeContent == null) { newlyUploadedFilesWithDupeContent = newlyUploadedDupe; @@ -2285,13 +2292,13 @@ private String processUploadedFileList(List dFileList) { dataFile.setPreviewImageAvailable(true); } uploadedFiles.add(dataFile); - */ + */ // We are NOT adding the fileMetadata to the list that is being used // to render the page; we'll do that once we know that all the individual uploads // in this batch (as in, a bunch of drag-and-dropped files) have finished. 
//fileMetadatas.add(dataFile.getFileMetadata()); } - + dataFile.getFileMetadata().setLabel(duplicateFilenameCheck(dataFile.getFileMetadata())); if (isTemporaryPreviewAvailable(dataFile.getStorageIdentifier(), dataFile.getContentType())) { dataFile.setPreviewImageAvailable(true); @@ -2334,7 +2341,7 @@ private String processUploadedFileList(List dFileList) { } } */ } - + // ----------------------------------------------------------- // Format error message for duplicate files // (note the separate messages for the files already in the dataset, @@ -2344,10 +2351,10 @@ private String processUploadedFileList(List dFileList) { String duplicateFilesErrorMessage = null; List args = Arrays.asList(uploadedFilesWithDupeContentToExisting, existingFilesWithDupeContent); - if (multipleDupesExisting) { + if (multipleDupesExisting) { duplicateFilesErrorMessage = BundleUtil.getStringFromBundle("dataset.files.exist", args); } else { - duplicateFilesErrorMessage = BundleUtil.getStringFromBundle("dataset.file.exist", args); + duplicateFilesErrorMessage = BundleUtil.getStringFromBundle("dataset.file.exist", args); } if (warningMessage == null) { warningMessage = duplicateFilesErrorMessage; @@ -2360,10 +2367,10 @@ private String processUploadedFileList(List dFileList) { String duplicateFilesErrorMessage = null; List args = Arrays.asList(newlyUploadedFilesWithDupeContent, uploadedFilesWithDupeContentToNewlyUploaded); - if (multipleDupesNew) { + if (multipleDupesNew) { duplicateFilesErrorMessage = BundleUtil.getStringFromBundle("dataset.files.duplicate", args); } else { - duplicateFilesErrorMessage = BundleUtil.getStringFromBundle("dataset.file.duplicate", args); + duplicateFilesErrorMessage = BundleUtil.getStringFromBundle("dataset.file.duplicate", args); } if (warningMessage == null) { warningMessage = duplicateFilesErrorMessage; @@ -2379,39 +2386,39 @@ private String processUploadedFileList(List dFileList) { return null; } - + private Map temporaryThumbnailsMap = new HashMap<>(); - + 
public boolean isTemporaryPreviewAvailable(String fileSystemId, String mimeType) { if (temporaryThumbnailsMap.get(fileSystemId) != null && !temporaryThumbnailsMap.get(fileSystemId).isEmpty()) { return true; } - + if ("".equals(temporaryThumbnailsMap.get(fileSystemId))) { // we've already looked once - and there's no thumbnail. return false; } - + String filesRootDirectory = System.getProperty("dataverse.files.directory"); if (filesRootDirectory == null || filesRootDirectory.isEmpty()) { filesRootDirectory = "/tmp/files"; } String fileSystemName = filesRootDirectory + "/temp/" + fileSystemId; - + String imageThumbFileName = null; - + // ATTENTION! TODO: the current version of the method below may not be checking if files are already cached! if ("application/pdf".equals(mimeType)) { imageThumbFileName = ImageThumbConverter.generatePDFThumbnailFromFile(fileSystemName, ImageThumbConverter.DEFAULT_THUMBNAIL_SIZE); } else if (mimeType != null && mimeType.startsWith("image/")) { imageThumbFileName = ImageThumbConverter.generateImageThumbnailFromFile(fileSystemName, ImageThumbConverter.DEFAULT_THUMBNAIL_SIZE); } - + if (imageThumbFileName != null) { File imageThumbFile = new File(imageThumbFileName); if (imageThumbFile.exists()) { - String previewAsBase64 = ImageThumbConverter.getImageAsBase64FromFile(imageThumbFile); + String previewAsBase64 = ImageThumbConverter.getImageAsBase64FromFile(imageThumbFile); if (previewAsBase64 != null) { temporaryThumbnailsMap.put(fileSystemId, previewAsBase64); return true; @@ -2420,28 +2427,28 @@ public boolean isTemporaryPreviewAvailable(String fileSystemId, String mimeType) } } } - + return false; } - + public String getTemporaryPreviewAsBase64(String fileSystemId) { return temporaryThumbnailsMap.get(fileSystemId); } - private Set fileLabelsExisting = null; - + private Set fileLabelsExisting = null; + private String duplicateFilenameCheck(FileMetadata fileMetadata) { if (fileLabelsExisting == null) { fileLabelsExisting = 
IngestUtil.existingPathNamesAsSet(workingVersion); } - + return IngestUtil.duplicateFilenameCheck(fileMetadata, fileLabelsExisting); } - private Map checksumMapOld = null; // checksums of the files already in the dataset - private Map checksumMapNew = null; // checksums of the new files already uploaded + private Map checksumMapOld = null; // checksums of the files already in the dataset + private Map checksumMapNew = null; // checksums of the new files already uploaded private Map fileAlreadyExists = null; - + private void initChecksumMap() { checksumMapOld = new HashMap<>(); @@ -2459,26 +2466,25 @@ private void initChecksumMap() { } } - + private boolean isFileAlreadyInDataset(DataFile dataFile) { if (checksumMapOld == null) { initChecksumMap(); } - + if (fileAlreadyExists == null) { fileAlreadyExists = new HashMap<>(); } - String chksum = dataFile.getChecksumValue(); - - if(checksumMapOld.get(chksum) != null){ - fileAlreadyExists.put(dataFile, checksumMapOld.get(chksum)); - } - + + if (checksumMapOld.get(chksum) != null) { + fileAlreadyExists.put(dataFile, checksumMapOld.get(chksum)); + } + return chksum == null ? 
false : checksumMapOld.get(chksum) != null; } - + private boolean isFileAlreadyUploaded(DataFile dataFile) { if (checksumMapNew == null) { @@ -2486,10 +2492,9 @@ private boolean isFileAlreadyUploaded(DataFile dataFile) { } return FileUtil.isFileAlreadyUploaded(dataFile, checksumMapNew, fileAlreadyExists); - + } - - + public boolean isLocked() { if (dataset != null) { logger.log(Level.FINE, "checking lock status of dataset {0}", dataset.getId()); @@ -2498,19 +2503,19 @@ public boolean isLocked() { // version of the dataset is locked: } Dataset lookedupDataset = datasetService.find(dataset.getId()); - - if ( (lookedupDataset!=null) && lookedupDataset.isLocked() ) { + + if ((lookedupDataset != null) && lookedupDataset.isLocked()) { logger.fine("locked!"); return true; } } return false; } - + private Boolean lockedFromEditsVar; - + public boolean isLockedFromEdits() { - if(null == lockedFromEditsVar ) { + if (null == lockedFromEditsVar) { try { permissionService.checkEditDatasetLock(dataset, dvRequestService.getDataverseRequest(), new UpdateDatasetVersionCommand(dataset, dvRequestService.getDataverseRequest())); lockedFromEditsVar = false; @@ -2520,16 +2525,15 @@ public boolean isLockedFromEdits() { } return lockedFromEditsVar; } - + // Methods for edit functions that are performed on one file at a time, // in popups that block the rest of the page: - private FileMetadata fileMetadataSelected = null; - public void setFileMetadataSelected(FileMetadata fm){ - setFileMetadataSelected(fm, null); + public void setFileMetadataSelected(FileMetadata fm) { + setFileMetadataSelected(fm, null); } - + public void setFileMetadataSelected(FileMetadata fm, String guestbook) { fileMetadataSelected = fm; @@ -2548,27 +2552,26 @@ public FileMetadata getFileMetadataSelected() { public void clearFileMetadataSelected() { fileMetadataSelected = null; } - - public boolean isDesignatedDatasetThumbnail (FileMetadata fileMetadata) { + + public boolean isDesignatedDatasetThumbnail(FileMetadata 
fileMetadata) { if (fileMetadata != null) { if (fileMetadata.getDataFile() != null) { if (fileMetadata.getDataFile().getId() != null) { //if (fileMetadata.getDataFile().getOwner() != null) { - if (fileMetadata.getDataFile().equals(dataset.getThumbnailFile())) { - return true; - } + if (fileMetadata.getDataFile().equals(dataset.getThumbnailFile())) { + return true; + } //} } } } return false; } - + /* * Items for the "Designated this image as the Dataset thumbnail: */ - - private FileMetadata fileMetadataSelectedForThumbnailPopup = null; + private FileMetadata fileMetadataSelectedForThumbnailPopup = null; /** * @param fm @@ -2578,22 +2581,22 @@ public boolean isDesignatedDatasetThumbnail (FileMetadata fileMetadata) { * button is in editFilesFragment.xhtml and it would be nice to move it to * Java since it's getting long and a bit complicated. */ - public void setFileMetadataSelectedForThumbnailPopup(FileMetadata fm){ - fileMetadataSelectedForThumbnailPopup = fm; - alreadyDesignatedAsDatasetThumbnail = getUseAsDatasetThumbnail(); + public void setFileMetadataSelectedForThumbnailPopup(FileMetadata fm) { + fileMetadataSelectedForThumbnailPopup = fm; + alreadyDesignatedAsDatasetThumbnail = getUseAsDatasetThumbnail(); } - + public FileMetadata getFileMetadataSelectedForThumbnailPopup() { return fileMetadataSelectedForThumbnailPopup; } - + public void clearFileMetadataSelectedForThumbnailPopup() { fileMetadataSelectedForThumbnailPopup = null; } - - private boolean alreadyDesignatedAsDatasetThumbnail = false; - + + private boolean alreadyDesignatedAsDatasetThumbnail = false; + public boolean getUseAsDatasetThumbnail() { return isDesignatedDatasetThumbnail(fileMetadataSelectedForThumbnailPopup); @@ -2656,24 +2659,23 @@ public boolean isThumbnailIsFromDatasetLogoRatherThanDatafile() { * Items for the "Tags (Categories)" popup. 
* */ - private FileMetadata fileMetadataSelectedForTagsPopup = null; + private FileMetadata fileMetadataSelectedForTagsPopup = null; - public void setFileMetadataSelectedForTagsPopup(FileMetadata fm){ + public void setFileMetadataSelectedForTagsPopup(FileMetadata fm) { fileMetadataSelectedForTagsPopup = fm; } - + public FileMetadata getFileMetadataSelectedForTagsPopup() { return fileMetadataSelectedForTagsPopup; } - + public void clearFileMetadataSelectedForTagsPopup() { fileMetadataSelectedForTagsPopup = null; } - + /* * 1. Tabular File Tags: */ - private List tabFileTags = null; public List getTabFileTags() { @@ -2686,7 +2688,7 @@ public List getTabFileTags() { public void setTabFileTags(List tabFileTags) { this.tabFileTags = tabFileTags; } - + private String[] selectedTabFileTags = {}; public String[] getSelectedTabFileTags() { @@ -2698,13 +2700,20 @@ public void setSelectedTabFileTags(String[] selectedTabFileTags) { } private String[] selectedTags = {}; - - public void refreshTagsPopUp(FileMetadata fm){ + + public void refreshTagsPopUp(FileMetadata fm) { + if(!isHasValidTermsOfAccess()){ + PrimeFaces.current().executeScript("PF('blockDatasetForm').show()"); + PrimeFaces.current().executeScript("PF('accessPopup').show()"); + JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.message.editTerms.label"), BundleUtil.getStringFromBundle("dataset.message.editTerms.message")); + return; + } setFileMetadataSelectedForTagsPopup(fm); refreshCategoriesByName(); refreshTabFileTagsByName(); + PrimeFaces.current().executeScript("PF('editFileTagsPopup').show()"); } - + private List tabFileTagsByName; public List getTabFileTagsByName() { @@ -2714,7 +2723,7 @@ public List getTabFileTagsByName() { public void setTabFileTagsByName(List tabFileTagsByName) { this.tabFileTagsByName = tabFileTagsByName; } - + private void refreshTabFileTagsByName() { tabFileTagsByName = new ArrayList<>(); if (fileMetadataSelectedForTagsPopup.getDataFile().getTags() != 
null) { @@ -2745,8 +2754,7 @@ private void refreshCategoriesByName(){ } refreshSelectedTags(); } - - + private List categoriesByName; public List getCategoriesByName() { @@ -2756,7 +2764,7 @@ public List getCategoriesByName() { public void setCategoriesByName(List categoriesByName) { this.categoriesByName = categoriesByName; } - + private void refreshSelectedTags() { selectedTags = null; selectedTags = new String[0]; @@ -2786,13 +2794,10 @@ public String[] getSelectedTags() { public void setSelectedTags(String[] selectedTags) { this.selectedTags = selectedTags; } - - /* * "File Tags" (aka "File Categories"): - */ - + */ private String newCategoryName = null; public String getNewCategoryName() { @@ -2802,9 +2807,9 @@ public String getNewCategoryName() { public void setNewCategoryName(String newCategoryName) { this.newCategoryName = newCategoryName; } - + public String saveNewCategory() { - + if (newCategoryName != null && !newCategoryName.isEmpty()) { categoriesByName.add(newCategoryName); } @@ -2817,24 +2822,30 @@ public String saveNewCategory() { newCategoryName = ""; return ""; } + + public void handleSelection(final AjaxBehaviorEvent event) { + if (selectedTags != null) { + selectedTags = selectedTags.clone(); + } + } /* This method handles saving both "tabular file tags" and * "file categories" (which are also considered "tags" in 4.0) - */ + */ public void saveFileTagsAndCategories() { if (fileMetadataSelectedForTagsPopup == null) { logger.fine("No FileMetadata selected for the categories popup"); - return; + return; } // 1. 
File categories: /* In order to get the cancel button to work we had to separate the selected tags from the file metadata and re-add them on save - */ - + */ + fileMetadataSelectedForTagsPopup.setCategories(new ArrayList<>()); - + // New, custom file category (if specified): if (newCategoryName != null) { logger.fine("Adding new category, " + newCategoryName + " for file " + fileMetadataSelectedForTagsPopup.getLabel()); @@ -2843,17 +2854,16 @@ public void saveFileTagsAndCategories() { logger.fine("no category specified"); } newCategoryName = null; - + // File Categories selected from the list of existing categories: if (selectedTags != null) { for (String selectedTag : selectedTags) { - + fileMetadataSelectedForTagsPopup.addCategoryByName(selectedTag); } - } - + } + // 2. Tabular DataFile Tags: - if (fileMetadataSelectedForTagsPopup.getDataFile() != null && tabularDataTagsUpdated && selectedTabFileTags != null) { fileMetadataSelectedForTagsPopup.getDataFile().setTags(null); for (String selectedTabFileTag : selectedTabFileTags) { @@ -2868,39 +2878,39 @@ public void saveFileTagsAndCategories() { } } } - + fileMetadataSelectedForTagsPopup = null; } - + public void handleFileCategoriesSelection(final AjaxBehaviorEvent event) { if (selectedTags != null) { selectedTags = selectedTags.clone(); } } - + public void handleTabularTagsSelection(final AjaxBehaviorEvent event) { tabularDataTagsUpdated = true; } - + /* * Items for the "Advanced (Ingest) Options" popup. 
* */ - private FileMetadata fileMetadataSelectedForIngestOptionsPopup = null; + private FileMetadata fileMetadataSelectedForIngestOptionsPopup = null; - public void setFileMetadataSelectedForIngestOptionsPopup(FileMetadata fm){ - fileMetadataSelectedForIngestOptionsPopup = fm; + public void setFileMetadataSelectedForIngestOptionsPopup(FileMetadata fm) { + fileMetadataSelectedForIngestOptionsPopup = fm; } - + public FileMetadata getFileMetadataSelectedForIngestOptionsPopup() { return fileMetadataSelectedForIngestOptionsPopup; } - + public void clearFileMetadataSelectedForIngestOptionsPopup() { fileMetadataSelectedForIngestOptionsPopup = null; } - + private String ingestLanguageEncoding = null; public String getIngestLanguageEncoding() { @@ -3012,21 +3022,18 @@ public void saveAdvancedOptions() { } public boolean rsyncUploadSupported() { - // ToDo - rsync was written before multiple store support and currently is hardcoded to use the "s3" store. - // When those restrictions are lifted/rsync can be configured per store, the test in the + // ToDo - rsync was written before multiple store support and currently is hardcoded to use the "s3" store. 
+ // When those restrictions are lifted/rsync can be configured per store, the test in the // Dataset Util method should be updated - if(settingsWrapper.isRsyncUpload() && !DatasetUtil.isAppropriateStorageDriver(dataset) ){ - //dataset.file.upload.setUp.rsync.failed.detail - FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("dataset.file.upload.setUp.rsync.failed"), BundleUtil.getStringFromBundle("dataset.file.upload.setUp.rsync.failed.detail")); - FacesContext.getCurrentInstance().addMessage(null, message); - } - + if (settingsWrapper.isRsyncUpload() && !DatasetUtil.isAppropriateStorageDriver(dataset)) { + //dataset.file.upload.setUp.rsync.failed.detail + FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_ERROR, BundleUtil.getStringFromBundle("dataset.file.upload.setUp.rsync.failed"), BundleUtil.getStringFromBundle("dataset.file.upload.setUp.rsync.failed.detail")); + FacesContext.getCurrentInstance().addMessage(null, message); + } - - return settingsWrapper.isRsyncUpload() && DatasetUtil.isAppropriateStorageDriver(dataset); + return settingsWrapper.isRsyncUpload() && DatasetUtil.isAppropriateStorageDriver(dataset); } - - + private void populateFileMetadatas() { fileMetadatas = new ArrayList<>(); if (selectedFileIdsList == null || selectedFileIdsList.isEmpty()) { @@ -3039,9 +3046,9 @@ private void populateFileMetadatas() { fileMetadatas.add(fmd); } } - } - } - + } + } + private String termsOfAccess; private boolean fileAccessRequest; @@ -3059,5 +3066,5 @@ public boolean isFileAccessRequest() { public void setFileAccessRequest(boolean fileAccessRequest) { this.fileAccessRequest = fileAccessRequest; - } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index 2b9476b5d4e..ca8a5d74b40 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -241,6 +241,13 @@ 
public String init() { return permissionsWrapper.notFound(); } + + hasRestrictedFiles = fileMetadata.getDatasetVersion().isHasRestrictedFile(); + hasValidTermsOfAccess = null; + hasValidTermsOfAccess = isHasValidTermsOfAccess(); + if(!hasValidTermsOfAccess && canUpdateDataset() ){ + JsfHelper.addWarningMessage(BundleUtil.getStringFromBundle("dataset.message.editMetadata.invalid.TOUA.message")); + } return null; } @@ -363,19 +370,28 @@ public String restrictFile(boolean restricted) throws CommandException{ editDataset = this.file.getOwner(); if (restricted) { // get values from access popup editDataset.getEditVersion().getTermsOfUseAndAccess().setTermsOfAccess(termsOfAccess); - editDataset.getEditVersion().getTermsOfUseAndAccess().setFileAccessRequest(fileAccessRequest); + editDataset.getEditVersion().getTermsOfUseAndAccess().setFileAccessRequest(fileAccessRequest); } - - Command cmd; - for (FileMetadata fmw : editDataset.getEditVersion().getFileMetadatas()) { - if (fmw.getDataFile().equals(this.fileMetadata.getDataFile())) { - fileNames += fmw.getLabel(); - //fmw.setRestricted(restricted); - cmd = new RestrictFileCommand(fmw.getDataFile(), dvRequestService.getDataverseRequest(), restricted); - commandEngine.submit(cmd); + //using this method to update the terms for datasets that are out of compliance + // with Terms of Access requirement - may get her with a file that is already restricted + // we'll allow it + try { + Command cmd; + for (FileMetadata fmw : editDataset.getEditVersion().getFileMetadatas()) { + if (fmw.getDataFile().equals(this.fileMetadata.getDataFile())) { + fileNames += fmw.getLabel(); + cmd = new RestrictFileCommand(fmw.getDataFile(), dvRequestService.getDataverseRequest(), restricted); + commandEngine.submit(cmd); + } } - } - + + } catch (CommandException ex) { + if (ex.getLocalizedMessage().contains("is already restricted")) { + //ok we're just updating the terms here + } else { + throw ex; + } + } if (fileNames != null) { String successMessage 
= BundleUtil.getStringFromBundle("file.restricted.success"); successMessage = successMessage.replace("{0}", fileNames); @@ -584,7 +600,7 @@ public void setTermsMet(boolean termsMet) { public String save() { // Validate - Set constraintViolations = this.fileMetadata.getDatasetVersion().validate(); + Set constraintViolations = editDataset.getEditVersion().validate(); if (!constraintViolations.isEmpty()) { //JsfHelper.addFlashMessage(JH.localize("dataset.message.validationError")); fileDeleteInProgress = false; @@ -732,6 +748,43 @@ public void setSelectedTabIndex(int selectedTabIndex) { this.selectedTabIndex = selectedTabIndex; } + private Boolean hasValidTermsOfAccess = null; + + public Boolean isHasValidTermsOfAccess() { + //cache in page to limit processing + if (hasValidTermsOfAccess != null){ + return hasValidTermsOfAccess; + } else { + if (!isHasRestrictedFiles()){ + hasValidTermsOfAccess = true; + return hasValidTermsOfAccess; + } else { + hasValidTermsOfAccess = TermsOfUseAndAccessValidator.isTOUAValid(fileMetadata.getDatasetVersion().getTermsOfUseAndAccess(), null); + return hasValidTermsOfAccess; + } + } + } + + public boolean getHasValidTermsOfAccess(){ + return isHasValidTermsOfAccess(); //HasValidTermsOfAccess + } + + public void setHasValidTermsOfAccess(boolean value){ + //dummy for ui + } + + private Boolean hasRestrictedFiles = null; + + public Boolean isHasRestrictedFiles(){ + //cache in page to limit processing + if (hasRestrictedFiles != null){ + return hasRestrictedFiles; + } else { + hasRestrictedFiles = fileMetadata.getDatasetVersion().isHasRestrictedFile(); + return hasRestrictedFiles; + } + } + public boolean isSwiftStorage () { Boolean swiftBool = false; if (file.getStorageIdentifier().startsWith("swift://")){ @@ -841,6 +894,9 @@ public boolean isLockedFromEdits() { } catch (IllegalCommandException ex) { lockedFromEditsVar = true; } + if (!isHasValidTermsOfAccess()){ + lockedFromEditsVar = true; + } } return lockedFromEditsVar; } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/Template.java b/src/main/java/edu/harvard/iq/dataverse/Template.java index 030b1078594..da9bbfd8374 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Template.java +++ b/src/main/java/edu/harvard/iq/dataverse/Template.java @@ -323,6 +323,10 @@ public Template cloneNewTemplate(Template source) { TermsOfUseAndAccess terms = null; if(source.getTermsOfUseAndAccess() != null){ terms = source.getTermsOfUseAndAccess().copyTermsOfUseAndAccess(); + } else { + terms = new TermsOfUseAndAccess(); + // terms.setLicense(TermsOfUseAndAccess.defaultLicense); + terms.setFileAccessRequest(true); } newTemplate.setTermsOfUseAndAccess(terms); return newTemplate; diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java index ab9b6601bbb..a8616283332 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java @@ -14,6 +14,7 @@ import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.OneToOne; +import javax.persistence.Transient; import edu.harvard.iq.dataverse.license.License; @@ -23,6 +24,7 @@ * @author skraffmi */ @Entity +@ValidateTermsOfUseAndAccess public class TermsOfUseAndAccess implements Serializable { @Id @@ -287,11 +289,17 @@ private void clearCustomTermsVariables(){ disclaimer = null; } - /** - * @todo What does the GUI use for a default license? What does the "native" - * API use? 
See also https://github.com/IQSS/dataverse/issues/1385 - */ + @Transient + private String validationMessage; + public String getValidationMessage() { + return validationMessage; + } + + public void setValidationMessage(String validationMessage) { + this.validationMessage = validationMessage; + } + @Override public int hashCode() { int hash = 0; diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java index dfa9e9f6c77..ee094d772e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java @@ -5,6 +5,7 @@ */ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.util.BundleUtil; import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidatorContext; @@ -12,21 +13,41 @@ * * @author skraffmi */ -public class TermsOfUseAndAccessValidator implements ConstraintValidator { +public class TermsOfUseAndAccessValidator implements ConstraintValidator { @Override public void initialize(ValidateTermsOfUseAndAccess constraintAnnotation) { - + } @Override public boolean isValid(TermsOfUseAndAccess value, ConstraintValidatorContext context) { - //if both null invalid - //if(value.getTemplate() == null && value.getDatasetVersion() == null) return false; - //if both not null invalid - //return !(value.getTemplate() != null && value.getDatasetVersion() != null); - return true; + return isTOUAValid(value, context); + + } + + public static boolean isTOUAValid(TermsOfUseAndAccess value, ConstraintValidatorContext context){ + //If there are no restricted files then terms are valid + if (!value.getDatasetVersion().isHasRestrictedFile()) { + return true; + } + /*If there are restricted files then the version + must allow access requests or have terms of access filled in. 
+ */ + boolean valid = value.isFileAccessRequest() == true || (value.getTermsOfAccess() != null && !value.getTermsOfAccess().isEmpty()); + if (!valid) { + try { + if (context != null) { + context.buildConstraintViolationWithTemplate(BundleUtil.getStringFromBundle("toua.invalid")).addConstraintViolation(); + } + + value.setValidationMessage(BundleUtil.getStringFromBundle("toua.invalid")); + } catch (NullPointerException e) { + return false; + } + return false; + } + return valid; } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 8c67aecc242..153d3f266b1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -624,6 +624,7 @@ public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, final DatasetVersion editVersion = ds.getEditVersion(); editVersion.setDatasetFields(incomingVersion.getDatasetFields()); editVersion.setTermsOfUseAndAccess( incomingVersion.getTermsOfUseAndAccess() ); + editVersion.getTermsOfUseAndAccess().setDatasetVersion(editVersion); Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); managedVersion = managedDataset.getEditVersion(); } else { @@ -683,7 +684,7 @@ public Response updateVersionMetadata(String jsonLDBody, @PathParam("id") String DatasetVersion dsv = ds.getEditVersion(); boolean updateDraft = ds.getLatestVersion().isDraft(); dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc); - + dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); DatasetVersion managedVersion; if (updateDraft) { Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); @@ -712,6 +713,7 @@ public Response deleteMetadata(String jsonLDBody, @PathParam("id") String id) { DatasetVersion dsv = ds.getEditVersion(); boolean updateDraft = 
ds.getLatestVersion().isDraft(); dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc); + dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); DatasetVersion managedVersion; if (updateDraft) { Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); @@ -747,7 +749,7 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); DatasetVersion dsv = ds.getEditVersion(); - + dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); List fields = new LinkedList<>(); DatasetField singleField = null; @@ -911,7 +913,7 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); DatasetVersion dsv = ds.getEditVersion(); - + dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); List fields = new LinkedList<>(); DatasetField singleField = null; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 9b652c29032..d15b0f1c48f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -249,6 +249,8 @@ public Response createDataset(String jsonBody, @PathParam("identifier") String p version.setMinorVersionNumber(null); version.setVersionNumber(null); version.setVersionState(DatasetVersion.VersionState.DRAFT); + version.getTermsOfUseAndAccess().setFileAccessRequest(true); + version.getTermsOfUseAndAccess().setDatasetVersion(version); ds.setAuthority(null); ds.setIdentifier(null); @@ -307,6 +309,8 @@ public Response createDatasetFromJsonLd(String jsonLDBody, @PathParam("identifie version.setMinorVersionNumber(null); version.setVersionNumber(null); version.setVersionState(DatasetVersion.VersionState.DRAFT); + 
version.getTermsOfUseAndAccess().setFileAccessRequest(true); + version.getTermsOfUseAndAccess().setDatasetVersion(version); ds.setAuthority(null); ds.setIdentifier(null); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java index b4c227c9812..9fb372d7121 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java @@ -74,6 +74,7 @@ public Statement getStatement(String editUri, Map map, AuthCrede // in the statement, the element is called "updated" Date lastUpdatedFinal = new Date(); Date lastUpdateTime = dataset.getLatestVersion().getLastUpdateTime(); + dataset.getLatestVersion().getTermsOfUseAndAccess().setDatasetVersion(dataset.getLatestVersion()); if (lastUpdateTime != null) { lastUpdatedFinal = lastUpdateTime; } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java index 46c38e04153..ad33e635c91 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java @@ -152,6 +152,8 @@ public void setDatasetLicenseAndTermsOfUse(DatasetVersion datasetVersionToMutate List listOfLicensesProvided = dcterms.get("license"); TermsOfUseAndAccess terms = new TermsOfUseAndAccess(); datasetVersionToMutate.setTermsOfUseAndAccess(terms); + terms.setDatasetVersion(datasetVersionToMutate); + if (listOfLicensesProvided == null) { License existingLicense = datasetVersionToMutate.getTermsOfUseAndAccess().getLicense(); if (existingLicense != null) { @@ -170,7 +172,9 @@ public void setDatasetLicenseAndTermsOfUse(DatasetVersion datasetVersionToMutate } } terms.setLicense(defaultLicense); - 
setTermsOfUse(datasetVersionToMutate, dcterms, null); + terms.setFileAccessRequest(datasetVersionToMutate.getTermsOfUseAndAccess().isFileAccessRequest()); + terms.setDatasetVersion(datasetVersionToMutate); + setTermsOfUse(datasetVersionToMutate, dcterms, defaultLicense); } return; } @@ -233,4 +237,4 @@ private void setTermsOfUse(DatasetVersion datasetVersionToMutate, Map cv.getMessage() + " (Invalid value:" + cv.getInvalidValue() + ")") .collect(joining(", ", "Validation Failed: ", ".")); + + validationMessage += constraintViolations.stream() + .filter(cv -> cv.getRootBean() instanceof TermsOfUseAndAccess) + .map(cv -> cv.toString()); throw new IllegalCommandException(validationMessage, this); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index 5830ab89537..772b6205b02 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -35,7 +35,7 @@ public class CuratePublishedDatasetVersionCommand extends AbstractDatasetCommand { private static final Logger logger = Logger.getLogger(CuratePublishedDatasetVersionCommand.class.getCanonicalName()); - final private boolean validateLenient = true; + final private boolean validateLenient = false; public CuratePublishedDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest) { super(aRequest, theDataset); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index 01ac0cf5804..52b7e1c5376 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -84,7 +84,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException { validateDataFiles(theDataset, ctxt); // (this will throw a CommandException if it fails) } - + + validateOrDie(theDataset.getLatestVersion(), false); + /* * Try to register the dataset identifier. For PID providers that have registerWhenPublished == false (all except the FAKE provider at present) * the registerExternalIdentifier command will make one try to create the identifier if needed (e.g. if reserving at dataset creation wasn't done/failed). diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java index 665172a1e9f..8a0e9b91066 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java @@ -70,8 +70,9 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException // When importing a released dataset, the latest version is marked as RELEASED. Dataset theDataset = getDataset(); - + validateOrDie(theDataset.getLatestVersion(), false); + //ToDo - any reason to set the version in publish versus finalize? Failure in a prepub workflow or finalize will leave draft versions with an assigned version number as is. //Changing the dataset in this transaction also potentially makes a race condition with a prepub workflow, possibly resulting in an OptimisticLockException there. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index e7f0e699fa8..989850f8620 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -374,7 +374,7 @@ public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) th terms.setStudyCompletion(obj.getString("studyCompletion", null)); terms.setFileAccessRequest(obj.getBoolean("fileAccessRequest", false)); dsv.setTermsOfUseAndAccess(terms); - + terms.setDatasetVersion(dsv); dsv.setDatasetFields(parseMetadataBlocks(obj.getJsonObject("metadataBlocks"))); JsonArray filesJson = obj.getJsonArray("files"); diff --git a/src/main/java/propertyFiles/.LCKBundle.properties~ b/src/main/java/propertyFiles/.LCKBundle.properties~ new file mode 100644 index 00000000000..bb86722b21e --- /dev/null +++ b/src/main/java/propertyFiles/.LCKBundle.properties~ @@ -0,0 +1 @@ +/Users/skraffmi/NetBeansProjects/dataverse/src/main/java/propertyFiles/Bundle.properties \ No newline at end of file diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 5035325ee8d..31a58ae65b1 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1474,6 +1474,8 @@ dataset.message.uploadFilesMultiple.message=Multiple file upload/download method dataset.message.editMetadata.label=Edit Dataset Metadata dataset.message.editMetadata.message=Add more metadata about this dataset to help others easily find it. dataset.message.editMetadata.duplicateFilenames=Duplicate filenames: {0} +dataset.message.editMetadata.invalid.TOUA.message=Datasets with restricted files are required to have Request Access enabled or Terms of Access to help people access the data. 
Please edit the dataset to confirm Request Access or provide Terms of Access to be in compliance with the policy. + dataset.message.editTerms.label=Edit Dataset Terms dataset.message.editTerms.message=Add the terms of use for this dataset to explain how to access and use your data. dataset.message.locked.editNotAllowedInReview=Dataset cannot be edited due to In Review dataset lock. @@ -1771,7 +1773,10 @@ file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions=Conditions file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions.title=Any additional information that will assist the user in understanding the access and use conditions of the Dataset. file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer=Disclaimer file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title=Information regarding responsibility for uses of the Dataset. -file.dataFilesTab.terms.list.termsOfAccess.header=Additional Access Information +file.dataFilesTab.terms.list.termsOfAccess.header=Restricted Files + Terms of Access +file.dataFilesTab.terms.list.termsOfAccess.description=Restricting limits access to published files. People who want to use the restricted files can request access by default. If you disable request access, you must add information about access to the Terms of Access field. +file.dataFilesTab.terms.list.termsOfAccess.description.line.2=Learn about restricting files and dataset access in the User Guide. + file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Restricted Files file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=The number of restricted files in this dataset. file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=There {0, choice, 0#are|1#is|2#are} {0} restricted {0, choice, 0#files|1#file|2#files} in this dataset. 
@@ -1781,6 +1786,7 @@ file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Request Access file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=If checked, users can request access to the restricted files in this dataset. file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request=Users may request access to files. file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest=Users may not request access to files. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.warning.outofcompliance=You must enable request access or add terms of access to restrict file access. file.dataFilesTab.terms.list.termsOfAccess.embargoed=Files are unavailable during the specified embargo. file.dataFilesTab.terms.list.termsOfAccess.embargoedthenrestricted=Files are unavailable during the specified embargo and restricted after that. file.dataFilesTab.terms.list.termsOfAccess.requestAccess.enableBtn=Enable access request @@ -2142,8 +2148,11 @@ ingest.csv.nullStream=Stream can't be null. citationFrame.banner.countdownMessage.seconds=seconds #file-edit-popup-fragment.xhtml #editFilesFragment.xhtml -dataset.access.accessHeader=Restrict Files -dataset.access.description=Restricting limits access to published files. You can add or edit Terms of Access for Restricted Files and allow people to Request Access to those files. +dataset.access.accessHeader=Restrict Access +dataset.access.accessHeader.invalid.state=Define Data Access +dataset.access.description=Restricting limits access to published files. People who want to use the restricted files can request access by default. +dataset.access.description.disable=If you disable request access, you must add information about access to the Terms of Access field. +dataset.access.description.line.2=Learn about restricting files and dataset access in the User Guide. 
#datasetFieldForEditFragment.xhtml dataset.AddReplication=Add "Replication Data for" to Title @@ -2496,6 +2505,7 @@ access.api.allowRequests.failure.noSave=Problem saving dataset {0}: {1} access.api.allowRequests.allows=allows access.api.allowRequests.disallows=disallows access.api.allowRequests.success=Dataset {0} {1} file access requests. +access.api.fileAccess.failure.noSave=Could not update Request Access for {0} Error Message {1} access.api.fileAccess.failure.noUser=Could not find user to execute command: {0} access.api.requestAccess.failure.commandError=Problem trying request access on {0} : {1} access.api.requestAccess.failure.requestExists=An access request for this file on your behalf already exists. diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index 0874f6939d9..a5930ccf35f 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -1,20 +1,20 @@ - -
- - - #{bundle['file.dataFilesTab.terms.editTermsBtn']} - + xmlns:h="http://java.sun.com/jsf/html" + xmlns:f="http://java.sun.com/jsf/core" + xmlns:ui="http://java.sun.com/jsf/facelets" + xmlns:c="http://java.sun.com/jsp/jstl/core" + xmlns:p="http://primefaces.org/ui" + xmlns:o="http://omnifaces.org/ui" + xmlns:jsf="http://xmlns.jcp.org/jsf"> + +
+ + + #{bundle['file.dataFilesTab.terms.editTermsBtn']} +
@@ -231,12 +231,11 @@
- -
- - - - + + + + +
 
-
+
@@ -294,29 +293,69 @@
-
-
-
-
- -
-
-
+ + + +
#{bundle['file.dataFilesTab.terms.list.guestbook']}  
-
+