Skip to content

Commit

Permalink
basic removal of linked dataverses/datasets #4406 #1364
Browse files Browse the repository at this point in the history
  • Loading branch information
ferrys committed Apr 11, 2018
1 parent 0391888 commit 1b44275
Show file tree
Hide file tree
Showing 5 changed files with 185 additions and 36 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.harvard.iq.dataverse.engine.command.impl;

import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetLinkingDataverse;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
import java.util.Collections;

/**
*
* @author sarahferry
*/

@RequiredPermissions( Permission.EditDataverse )
public class DeleteLinkedDatasetCommand extends AbstractCommand<Dataset>{
    // The dataset-to-dataverse link entity that will be deleted.
    private final DatasetLinkingDataverse doomed;
    // The linked dataset; returned (merged) after the link is removed.
    private final Dataset editedDs;

    /**
     * Removes the link between a dataset and a dataverse.
     *
     * @param aRequest the request on whose behalf the command runs
     * @param editedDs the dataset whose link is being removed
     * @param doomed   the {@link DatasetLinkingDataverse} link entity to delete
     */
    public DeleteLinkedDatasetCommand(DataverseRequest aRequest, Dataset editedDs , DatasetLinkingDataverse doomed) {
        super(aRequest, editedDs);
        this.editedDs = editedDs;
        this.doomed = doomed;
    }

    /**
     * Deletes the link entity and returns the (managed) dataset.
     *
     * @throws PermissionException if the caller is not an authenticated superuser
     */
    @Override
    public Dataset execute(CommandContext ctxt) throws CommandException {
        // Unlinking is restricted to superusers, in addition to the
        // EditDataverse permission required by the class annotation.
        if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) {
            // Message and permission fixed: this command unlinks a dataset (it was a
            // copy-paste of "Move Dataset"), and the reported permission now matches
            // the @RequiredPermissions annotation.
            throw new PermissionException("Unlink Dataset can only be called by superusers.",
                this, Collections.singleton(Permission.EditDataverse), editedDs);
        }
        // Re-attach both entities to the current persistence context before removal.
        Dataset merged = ctxt.em().merge(editedDs);
        DatasetLinkingDataverse doomedAndMerged = ctxt.em().merge(doomed);
        ctxt.em().remove(doomedAndMerged);
        return merged;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.harvard.iq.dataverse.engine.command.impl;

import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseLinkingDataverse;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
import java.util.Collections;

/**
*
* @author sarahferry
*/

@RequiredPermissions( Permission.EditDataverse )
public class DeleteLinkedDataverseCommand extends AbstractCommand<Dataverse> {

    // The dataverse-to-dataverse link entity that will be deleted.
    private final DataverseLinkingDataverse doomed;
    // The linked dataverse; returned (merged) after the link is removed.
    private final Dataverse editedDv;

    /**
     * Removes the link between two dataverses.
     *
     * @param aRequest the request on whose behalf the command runs
     * @param editedDv the dataverse whose link is being removed
     * @param doomed   the {@link DataverseLinkingDataverse} link entity to delete
     */
    public DeleteLinkedDataverseCommand(DataverseRequest aRequest, Dataverse editedDv , DataverseLinkingDataverse doomed) {
        super(aRequest, editedDv);
        this.editedDv = editedDv;
        this.doomed = doomed;
    }

    /**
     * Deletes the link entity and returns the (managed) dataverse.
     *
     * @throws PermissionException if the caller is not an authenticated superuser
     */
    @Override
    public Dataverse execute(CommandContext ctxt) throws CommandException {
        // Unlinking is restricted to superusers, in addition to the
        // EditDataverse permission required by the class annotation.
        if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) {
            // Message and permission fixed: this command unlinks a dataverse (it was a
            // copy-paste of "Move Dataset"), and the reported permission now matches
            // the @RequiredPermissions annotation.
            throw new PermissionException("Unlink Dataverse can only be called by superusers.",
                this, Collections.singleton(Permission.EditDataverse), editedDv);
        }
        // Re-attach both entities to the current persistence context before removal.
        Dataverse merged = ctxt.em().merge(editedDv);
        DataverseLinkingDataverse doomedAndMerged = ctxt.em().merge(doomed);
        ctxt.em().remove(doomedAndMerged);
        return merged;
    }
}
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
package edu.harvard.iq.dataverse.engine.command.impl;

import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetLinkingDataverse;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseFeaturedDataverse;
import edu.harvard.iq.dataverse.DataverseLinkingDataverse;
import edu.harvard.iq.dataverse.Guestbook;
import static edu.harvard.iq.dataverse.IdServiceBean.logger;
import edu.harvard.iq.dataverse.MetadataBlock;
Expand Down Expand Up @@ -59,7 +61,7 @@ public MoveDataverseCommand(DataverseRequest aRequest, Dataverse moved, Datavers
public void executeImpl(CommandContext ctxt) throws CommandException {
long moveDvStart = System.currentTimeMillis();
logger.info("Starting dataverse move...");
boolean removeGuestbook = false, removeTemplate = false, removeFeatDv = false, removeMetadataBlock = false;
boolean removeGuestbook = false, removeTemplate = false, removeFeatDv = false, removeMetadataBlock = false, removeLinkDv = false, removeLinkDs = false;

// first check if user is a superuser
if ((!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser())) {
Expand Down Expand Up @@ -89,32 +91,30 @@ public void executeImpl(CommandContext ctxt) throws CommandException {
List<Dataverse> dataverseChildren = ctxt.dataverses().findAllDataverseDataverseChildren(moved);
dataverseChildren.add(moved); // include the root of the children


// generate list of all possible parent dataverses to check against
List<Dataverse> ownersToCheck = new ArrayList<>();
ownersToCheck.add(destination);
ownersToCheck.add(moved);
if (destination.getOwners() != null) {
ownersToCheck.addAll(destination.getOwners());
}

// if all the dataverse's datasets GUESTBOOKS are not contained in the new dataverse then remove the
// ones that aren't
List<Guestbook> destinationGbs = null;
if (moved.getGuestbooks() != null) {
logger.info("Checking guestbooks...");
List<Guestbook> movedGbs = moved.getGuestbooks();
List<Guestbook> destinationGbs = destination.getGuestbooks();
destinationGbs = destination.getGuestbooks();
boolean inheritGuestbooksValue = !destination.isGuestbookRoot();
if (inheritGuestbooksValue && destination.getOwner() != null) {
destinationGbs.addAll(destination.getParentGuestbooks());
}
// include guestbooks in moved dataverse since they will also be there
// in the destination
destinationGbs.addAll(movedGbs);
for (Dataset ds : datasetChildren) {
Guestbook dsgb = ds.getGuestbook();
if (dsgb != null && (destinationGbs == null || !destinationGbs.contains(dsgb))) {
if (force == null || !force) {
removeGuestbook = true;
break;
}
ds.setGuestbook(null);
}
}
}


// if the dataverse is FEATURED by its parent, remove it
List<DataverseFeaturedDataverse> ownerFeaturedDv = moved.getOwner().getDataverseFeaturedDataverses();
if (ownerFeaturedDv != null) {
Expand Down Expand Up @@ -144,26 +144,21 @@ public void executeImpl(CommandContext ctxt) throws CommandException {
destinationTemplates.addAll(movedTemplates);
}


// if all the dataverses METADATA BLOCKS are not contained in the new dataverse then remove the
// ones that aren't available in the destination
// i.e. the case where a custom metadata block is available through a parent
// but then the dataverse is moved outside of that parent-child structure
List<Dataverse> ownersToCheck = null;
Boolean inheritMbValue = null;
List<Dataverse> mbParentsToCheck = new ArrayList<>();
mbParentsToCheck.addAll(ownersToCheck);
mbParentsToCheck.addAll(dataverseChildren);
if (moved.getMetadataBlocks() != null) {
inheritMbValue = !destination.isMetadataBlockRoot();
// generate list of all possible metadata block owner dataverses to check against
ownersToCheck = new ArrayList<>();
ownersToCheck.add(destination);
ownersToCheck.add(moved);
ownersToCheck.addAll(dataverseChildren);
if (destination.getOwners() != null) {
ownersToCheck.addAll(destination.getOwners());
}
}

logger.info("Checking templates and metadata blocks...");

List<DataverseLinkingDataverse> linkingDataverses = new ArrayList();

logger.info("Checking templates and metadata blocks");
for (Dataverse dv : dataverseChildren) {
if (destinationTemplates != null) {
Template dvt = dv.getDefaultTemplate();
Expand All @@ -175,10 +170,10 @@ public void executeImpl(CommandContext ctxt) throws CommandException {
dv.setDefaultTemplate(null);
}
}

// determine which metadata blocks to keep selected
// on the moved dataverse and its children
if (ownersToCheck != null && inheritMbValue != null) {
if (inheritMbValue != null) {
List<MetadataBlock> metadataBlocksToKeep = new ArrayList<>();
List<MetadataBlock> movedMbs = dv.getMetadataBlocks(true);
Iterator<MetadataBlock> iter = movedMbs.iterator();
Expand All @@ -187,12 +182,12 @@ public void executeImpl(CommandContext ctxt) throws CommandException {
// if the owner is null, it means that the owner is the root dataverse
// because technically only custom metadata blocks have owners
Dataverse mbOwner = (mb.getOwner() != null) ? mb.getOwner() : ctxt.dataverses().findByAlias(":root");
if (!ownersToCheck.contains(mbOwner)) {
if (!mbParentsToCheck.contains(mbOwner)) {
if (force == null || !force) {
removeMetadataBlock = true;
break;
}
} else if (ownersToCheck.contains(mbOwner) || inheritMbValue) {
} else if (mbParentsToCheck.contains(mbOwner) || inheritMbValue) {
// only keep metadata block if
// it is being inherited from its parent
metadataBlocksToKeep.add(mb);
Expand All @@ -202,9 +197,58 @@ public void executeImpl(CommandContext ctxt) throws CommandException {
dv.setMetadataBlocks(metadataBlocksToKeep);
}
}

if (dv.getDataverseLinkingDataverses() != null) {
linkingDataverses.addAll(dv.getDataverseLinkingDataverses());
}
}

List<DatasetLinkingDataverse> linkingDatasets = new ArrayList();
logger.info("Checking guestbooks...");
for (Dataset ds : datasetChildren) {
Guestbook dsgb = ds.getGuestbook();
if (dsgb != null && (destinationGbs == null || !destinationGbs.contains(dsgb))) {
if (force == null || !force) {
removeGuestbook = true;
break;
}
ds.setGuestbook(null);
}
if (ds.getDatasetLinkingDataverses() != null) {
linkingDatasets.addAll(ds.getDatasetLinkingDataverses());
}
}

for (DataverseLinkingDataverse dvld : linkingDataverses) {
logger.info("Checking linked dataverses....");
for (Dataverse owner : ownersToCheck){
if ((dvld.getLinkingDataverse()).equals(owner)){
if (force == null || !force) {
removeLinkDv = true;
break;
}
ctxt.engine().submit(new DeleteLinkedDataverseCommand(getRequest(), dvld.getDataverse(), dvld));
(dvld.getDataverse()).getDataverseLinkingDataverses().remove(dvld);
}
}
}

if (removeGuestbook || removeTemplate || removeFeatDv || removeMetadataBlock) {
for (DatasetLinkingDataverse dsld : linkingDatasets) {
logger.info("Checking linked datasets...");
for (Dataverse owner : ownersToCheck){
if ((dsld.getLinkingDataverse()).equals(owner)){
if (force == null || !force) {
removeLinkDs = true;
break;
}
ctxt.engine().submit(new DeleteLinkedDatasetCommand(getRequest(), dsld.getDataset(), dsld));
(dsld.getDataset()).getDatasetLinkingDataverses().remove(dsld);
}
}
}


if (removeGuestbook || removeTemplate || removeFeatDv || removeMetadataBlock || removeLinkDv || removeLinkDs) {
StringBuilder errorString = new StringBuilder();
if (removeGuestbook) {
errorString.append("Dataset guestbook is not in target dataverse. ");
Expand All @@ -218,6 +262,12 @@ public void executeImpl(CommandContext ctxt) throws CommandException {
if (removeMetadataBlock) {
errorString.append("Dataverse metadata block is not in target dataverse. ");
}
if (removeLinkDv) {
errorString.append("Dataverse is linked to target dataverse or one of its parents.");
}
if (removeLinkDs) {
errorString.append("Dataset is linked to target dataverse or one of its parents.");
}
errorString.append("Please use the parameter ?forceMove=true to complete the move. This will remove anything from the dataverse that is not compatible with the target dataverse.");
throw new IllegalCommandException(errorString.toString(), this);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -185,8 +185,7 @@ public void indexDataverseRecursively(Dataverse dataverse) {
indexDataverseRecursively(child);
}
long end = System.currentTimeMillis();
logger.info("Total time to index: " + (end - start));

logger.info("Time to index so far: " + (end - start));
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseLinkingServiceBean;
import edu.harvard.iq.dataverse.DataverseServiceBean;
import edu.harvard.iq.dataverse.Guestbook;
import edu.harvard.iq.dataverse.MetadataBlock;
Expand Down Expand Up @@ -219,9 +220,7 @@ public Future<String> indexDataverseInNewTransaction(Dataverse dataverse){
@Override
public Future<String> indexDatasetInNewTransaction(Long id){
return null;
}


}
};

}
Expand All @@ -248,6 +247,12 @@ public List<Dataset> findByOwnerId(Long ownerId) {
public EntityManager em() {
return new NoOpTestEntityManager();
}
@Override
public DataverseLinkingServiceBean dvLinking() {
return new DataverseLinkingServiceBean() {

};
}
});
}

Expand Down

0 comments on commit 1b44275

Please sign in to comment.