Skip to content

Commit

Permalink
Merge pull request #3497 from IQSS/3353-batch-job-import
Browse files Browse the repository at this point in the history
3353 batch job import
  • Loading branch information
kcondon authored Feb 9, 2017
2 parents cfc1c78 + a1c46ef commit 7162f5a
Show file tree
Hide file tree
Showing 21 changed files with 3,467 additions and 31 deletions.
6 changes: 6 additions & 0 deletions src/main/java/Bundle.properties
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,9 @@ notification.access.granted.fileDownloader.additionalDataset={0} You now have ac
notification.access.revoked.dataverse=You have been removed from a role in {0}.
notification.access.revoked.dataset=You have been removed from a role in {0}.
notification.access.revoked.datafile=You have been removed from a role in {0}.
notification.checksumfail=Your upload to dataset "{0}" failed checksum validation.
notification.import.filesystem=<a href="{0}/dataset.xhtml?persistentId={1}" title="{2}">{2}</a>, dataset had files imported from the file system via a batch job.
notification.import.checksum=<a href="/dataset.xhtml?persistentId={0}" title="{1}">{1}</a>, dataset had file checksums added via a batch job.
removeNotification=Remove Notification
groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned.
user.signup.tip=Why have a Dataverse account? To create your own dataverse and customize it, add datasets, or request access to restricted files.
Expand Down Expand Up @@ -528,6 +531,9 @@ hours=hours
hour=hour
minutes=minutes
minute=minute
notification.email.checksumfail.subject=Dataverse: Your upload failed checksum validation.
notification.email.import.filesystem.subject=Dataverse: Your file import job has completed
notification.email.import.checksum.subject=Dataverse: Your file checksum job has completed

# passwordreset.xhtml

Expand Down
7 changes: 7 additions & 0 deletions src/main/java/edu/harvard/iq/dataverse/DataFile.java
Original file line number Diff line number Diff line change
Expand Up @@ -633,6 +633,13 @@ public boolean isImage() {
// generate thumbnails and previews for them)
return (contentType != null && (contentType.startsWith("image/") || contentType.equalsIgnoreCase("application/pdf")));
}

/**
 * Reports whether this file is a Dataverse "file package" — a placeholder
 * DataFile representing files brought in from the file system via a batch
 * import job, identified solely by its content type.
 *
 * @return {@code true} if the content type equals (case-insensitively)
 *         {@link DataFileServiceBean#MIME_TYPE_PACKAGE_FILE}; {@code false}
 *         otherwise, including when {@code contentType} is {@code null}.
 */
public boolean isFilePackage() {
    // equalsIgnoreCase on the constant is null-safe for contentType,
    // and a direct boolean return avoids the if/return-true/return-false idiom.
    return DataFileServiceBean.MIME_TYPE_PACKAGE_FILE.equalsIgnoreCase(contentType);
}

public void setIngestStatus(char ingestStatus) {
this.ingestStatus = ingestStatus;
Expand Down
30 changes: 29 additions & 1 deletion src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,17 @@ public class DataFileServiceBean implements java.io.Serializable {

private static final String MIME_TYPE_UNDETERMINED_DEFAULT = "application/octet-stream";
private static final String MIME_TYPE_UNDETERMINED_BINARY = "application/binary";

/**
* Per https://en.wikipedia.org/wiki/Media_type#Vendor_tree just "dataverse"
* should be fine.
*
* @todo Consider registering this at http://www.iana.org/form/media-types
* or switch to "prs" which "includes media types created experimentally or
* as part of products that are not distributed commercially" according to
* the page URL above.
*/
public static final String MIME_TYPE_PACKAGE_FILE = "application/vnd.dataverse.file-package";

public DataFile find(Object pk) {
return (DataFile) em.find(DataFile.class, pk);
Expand Down Expand Up @@ -168,7 +179,24 @@ public List<DataFile> findByDatasetId(Long studyId) {
Query query = em.createQuery("select o from DataFile o where o.owner.id = :studyId order by o.id");
query.setParameter("studyId", studyId);
return query.getResultList();
}
}

/**
 * Finds the DataFile with the given storage identifier (filesystemname) that
 * has a FileMetadata in the given dataset version.
 *
 * @param storageId the value of datafile.filesystemname to match
 * @param dv the dataset version whose file metadata must reference the file
 * @return the matching DataFile, or {@code null} if none is found or the
 *         lookup fails (the failure is logged at SEVERE)
 */
public DataFile findByStorageIdandDatasetVersion(String storageId, DatasetVersion dv) {
    try {
        // Bind values as positional parameters instead of concatenating them
        // into the SQL string: concatenation of storageId was an SQL
        // injection vector and also breaks on ids containing quotes.
        Query query = em.createNativeQuery("select o.id from datafile o, filemetadata m " +
            "where o.filesystemname = ?1 and o.id = m.datafile_id and m.datasetversion_id = ?2");
        query.setParameter(1, storageId);
        query.setParameter(2, dv.getId());
        query.setMaxResults(1);
        if (query.getResultList().isEmpty()) {
            return null;
        }
        return findCheapAndEasy((Long) query.getSingleResult());
    } catch (Exception e) {
        logger.log(Level.SEVERE, "Error finding datafile by storageID and DataSetVersion: " + e.getMessage());
        return null;
    }
}

public List<FileMetadata> findFileMetadataByDatasetVersionId(Long datasetVersionId, int maxResults, String userSuppliedSortField, String userSuppliedSortOrder) {
FileSortFieldAndOrder sortFieldAndOrder = new FileSortFieldAndOrder(userSuppliedSortField, userSuppliedSortOrder);
Expand Down
40 changes: 40 additions & 0 deletions src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,12 @@ private String getSubjectTextBasedOnNotification(UserNotification userNotificati
return ResourceBundle.getBundle("Bundle").getString("notification.email.returned.dataset.subject");
case CREATEACC:
return ResourceBundle.getBundle("Bundle").getString("notification.email.create.account.subject");
case CHECKSUMFAIL:
return ResourceBundle.getBundle("Bundle").getString("notification.email.checksumfail.subject");
case FILESYSTEMIMPORT:
return ResourceBundle.getBundle("Bundle").getString("notification.email.import.filesystem.subject");
case CHECKSUMIMPORT:
return ResourceBundle.getBundle("Bundle").getString("notification.email.import.checksum.subject");
}
return "";
}
Expand Down Expand Up @@ -435,6 +441,34 @@ private String getMessageTextBasedOnNotification(UserNotification userNotificati
accountCreatedMessage += optionalConfirmEmailAddon;
logger.fine("accountCreatedMessage: " + accountCreatedMessage);
return messageText += accountCreatedMessage;

case CHECKSUMFAIL:
version = (DatasetVersion) targetObject;
String checksumFailMsg = BundleUtil.getStringFromBundle("notification.checksumfail", Arrays.asList(
version.getDataset().getGlobalId()
));
logger.info("checksumFailMsg: " + checksumFailMsg);
return messageText += checksumFailMsg;

case FILESYSTEMIMPORT:
version = (DatasetVersion) targetObject;
String fileImportMsg = BundleUtil.getStringFromBundle("notification.import.filesystem", Arrays.asList(
systemConfig.getDataverseSiteUrl(),
version.getDataset().getGlobalId(),
version.getDataset().getDisplayName()
));
logger.info("fileImportMsg: " + fileImportMsg);
return messageText += fileImportMsg;

case CHECKSUMIMPORT:
version = (DatasetVersion) targetObject;
String checksumImportMsg = BundleUtil.getStringFromBundle("notification.import.checksum", Arrays.asList(
version.getDataset().getGlobalId(),
version.getDataset().getDisplayName()
));
logger.info("checksumImportMsg: " + checksumImportMsg);
return messageText += checksumImportMsg;

}

return "";
Expand Down Expand Up @@ -465,6 +499,12 @@ private Object getObjectOfNotification (UserNotification userNotification){
return versionService.find(userNotification.getObjectId());
case CREATEACC:
return userNotification.getUser();
case CHECKSUMFAIL:
return datasetService.find(userNotification.getObjectId());
case FILESYSTEMIMPORT:
return versionService.find(userNotification.getObjectId());
case CHECKSUMIMPORT:
return versionService.find(userNotification.getObjectId());
}
return null;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@

public class UserNotification implements Serializable {
// Kinds of in-app/email notifications a user can receive. The last three
// (FILESYSTEMIMPORT, CHECKSUMIMPORT, CHECKSUMFAIL) were added for batch
// file-system import jobs; MailServiceBean maps each constant to a subject
// line and message template in Bundle.properties.
public enum Type {
    ASSIGNROLE, REVOKEROLE, CREATEDV, CREATEDS, CREATEACC, MAPLAYERUPDATED, SUBMITTEDDS, RETURNEDDS, PUBLISHEDDS, REQUESTFILEACCESS, GRANTFILEACCESS, REJECTFILEACCESS, FILESYSTEMIMPORT, CHECKSUMIMPORT, CHECKSUMFAIL
};

private static final long serialVersionUID = 1L;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
package edu.harvard.iq.dataverse.api.batchjob;

import com.fasterxml.jackson.databind.ObjectMapper;
import edu.harvard.iq.dataverse.api.AbstractApiBean;
import edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity;

import javax.batch.operations.JobOperator;
import javax.batch.runtime.BatchRuntime;
import javax.batch.runtime.JobExecution;
import javax.batch.runtime.JobInstance;
import javax.ejb.Stateless;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;


@Stateless
@Path("admin/batch")
public class BatchJobResource extends AbstractApiBean {

private static String EMPTY_JSON_LIST = "[]";
private static String EMPTY_JSON_OBJ = "{}";
private static ObjectMapper mapper = new ObjectMapper();

@GET
@Path("/jobs")
@Produces(MediaType.APPLICATION_JSON)
public Response listBatchJobs() {
try {
final List<JobExecutionEntity> executionEntities = new ArrayList<>();
final JobOperator jobOperator = BatchRuntime.getJobOperator();
final Set<String> names = jobOperator.getJobNames();
for (String name : names) {
final int end = jobOperator.getJobInstanceCount(name);
final List<JobInstance> jobInstances = jobOperator.getJobInstances(name, 0, end);
for (JobInstance jobInstance : jobInstances) {
final List<JobExecution> executions = jobOperator.getJobExecutions(jobInstance);
for (JobExecution execution : executions) {
executionEntities.add(JobExecutionEntity.create(execution));
}
}
}
return Response.ok("{ \"jobs\": \n" + mapper.writeValueAsString(executionEntities) + "\n}").build();
} catch (Exception e) {
return Response.ok(EMPTY_JSON_LIST).build();
}
}

@GET
@Path("/jobs/name/{jobName}")
@Produces(MediaType.APPLICATION_JSON)
public Response listBatchJobsByName( @PathParam("jobName") String jobName) {
try {
final List<JobExecutionEntity> executionEntities = new ArrayList<>();
final JobOperator jobOperator = BatchRuntime.getJobOperator();
final int end = jobOperator.getJobInstanceCount(jobName);
final List<JobInstance> jobInstances = jobOperator.getJobInstances(jobName, 0, end);
for (JobInstance jobInstance : jobInstances) {
final List<JobExecution> executions = jobOperator.getJobExecutions(jobInstance);
for (JobExecution execution : executions) {
executionEntities.add(JobExecutionEntity.create(execution));
}
}
return Response.ok("{ \"jobs\": \n" + mapper.writeValueAsString(executionEntities) + "\n}").build();
} catch (Exception e) {
return Response.ok(EMPTY_JSON_LIST).build();
}
}


@GET
@Path("/jobs/{jobId}")
@Produces(MediaType.APPLICATION_JSON)
public Response listBatchJobById(@PathParam("jobId") String jobId) {
try {
JobExecution execution = BatchRuntime.getJobOperator().getJobExecution(Long.valueOf(jobId));
return Response.ok(mapper.writeValueAsString(JobExecutionEntity.create(execution))).build();
} catch (Exception e) {
return Response.ok(EMPTY_JSON_OBJ).build();
}
}

}
Loading

0 comments on commit 7162f5a

Please sign in to comment.