Merge pull request #3000 from IQSS/2606-citation-date
added backend support for alternate citation dates  #2606
scolapasta committed Mar 8, 2016
2 parents 10a905f + 44dd4f6 commit 0e6f43b
Showing 8 changed files with 220 additions and 24 deletions.
11 changes: 11 additions & 0 deletions doc/sphinx-guides/source/api/native-api.rst
@@ -139,6 +139,17 @@ Deletes the draft version of dataset ``$id``. Only the draft version can be dele

DELETE http://$SERVER/api/datasets/$id/versions/:draft?key=$apiKey

Sets the dataset field type to be used as the citation date for the given dataset (if the dataset does not include that dataset field type, the default logic is used). The name of the dataset field type should be sent in the body of the request.
To revert to the default logic, use ``:publicationDate`` as the ``$datasetFieldTypeName``.
Note that the dataset field type used must be a date field::

PUT http://$SERVER/api/datasets/$id/citationdate?key=$apiKey

Restores the default logic for the field type to be used as the citation date. Same as ``PUT`` with ``:publicationDate`` in the body::

DELETE http://$SERVER/api/datasets/$id/citationdate?key=$apiKey
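
For illustration, assuming a Dataverse server on localhost, a dataset with database id 24, and a date-typed field named ``dateOfDeposit``, the two endpoints could be exercised roughly as follows::

# set the citation date to a date-typed field (server, dataset id, and field name are assumptions)
curl -X PUT -d dateOfDeposit "http://localhost:8080/api/datasets/24/citationdate?key=$apiKey"

# revert to the default publication-date logic
curl -X DELETE "http://localhost:8080/api/datasets/24/citationdate?key=$apiKey"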


Builtin Users
~~~~~~~~~~~~~

10 changes: 9 additions & 1 deletion scripts/database/upgrades/upgrade_v4.2.4_to_4.3.sql
@@ -25,4 +25,12 @@ drop column sizeofcollection,
drop column specialpermissions,
drop column studycompletion,
drop column termsofaccess,
drop column termsofuse;
drop column termsofuse;

-- Add new foreign key to dataset for citation date (from datasetfieldtype)
ALTER TABLE dataset ADD COLUMN citationdatedatasetfieldtype_id bigint;

ALTER TABLE dataset
ADD CONSTRAINT fk_dataset_citationdatedatasetfieldtype_id FOREIGN KEY (citationdatedatasetfieldtype_id)
REFERENCES datasetfieldtype (id) MATCH SIMPLE
ON UPDATE NO ACTION ON DELETE NO ACTION;
13 changes: 13 additions & 0 deletions src/main/java/edu/harvard/iq/dataverse/Dataset.java
@@ -13,6 +13,7 @@
import javax.persistence.Entity;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
@@ -81,6 +82,18 @@ public void setDatasetLinkingDataverses(List<DatasetLinkingDataverse> datasetLin
private boolean fileAccessRequest;
@OneToMany(mappedBy = "dataset", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
private List<DataFileCategory> dataFileCategories = null;

@ManyToOne
@JoinColumn(name = "citationDateDatasetFieldType_id")
private DatasetFieldType citationDateDatasetFieldType;

public DatasetFieldType getCitationDateDatasetFieldType() {
return citationDateDatasetFieldType;
}

public void setCitationDateDatasetFieldType(DatasetFieldType citationDateDatasetFieldType) {
this.citationDateDatasetFieldType = citationDateDatasetFieldType;
}

public Dataset() {
//this.versions = new ArrayList();
11 changes: 10 additions & 1 deletion src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -121,7 +121,16 @@ public List<Dataset> findAllOrSubset(long numPartitions, long partitionId, boole
typedQuery.setParameter("partitionId", partitionId);
return typedQuery.getResultList();
}


/**
* Merges the passed dataset to the persistence context.
* @param ds the dataset whose new state we want to persist.
* @return The managed entity representing {@code ds}.
*/
public Dataset merge( Dataset ds ) {
return em.merge(ds);
}

public Dataset findByGlobalId(String globalId) {

String protocol = "";
73 changes: 56 additions & 17 deletions src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
@@ -1,8 +1,10 @@
package edu.harvard.iq.dataverse;

import edu.harvard.iq.dataverse.DatasetFieldType.FieldType;
import edu.harvard.iq.dataverse.util.StringUtil;
import java.io.Serializable;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
@@ -12,6 +14,8 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
Expand Down Expand Up @@ -664,30 +668,40 @@ public String getCitation(boolean isOnlineVersion) {
} else {
str += getDatasetProducersString();
}

Date citationDate = getCitationDate();
if (citationDate != null) {
if (!StringUtil.isEmpty(str)) {
str += ", ";
}
str += new SimpleDateFormat("yyyy").format(citationDate);

} else {
if (this.getDataset().getPublicationDate() == null || StringUtil.isEmpty(this.getDataset().getPublicationDate().toString())) {

if (this.getDataset().getPublicationDate() == null || StringUtil.isEmpty(this.getDataset().getPublicationDate().toString())) {

if (!this.getDataset().isHarvested()) {
//if not released use current year
if (!StringUtil.isEmpty(str)) {
str += ", ";
}
str += new SimpleDateFormat("yyyy").format(new Timestamp(new Date().getTime()));
} else {
String distDate = getDistributionDate();
if (distDate != null) {
if (!this.getDataset().isHarvested()) {
//if not released use current year
if (!StringUtil.isEmpty(str)) {
str += ", ";
}
str += distDate;
str += new SimpleDateFormat("yyyy").format(new Timestamp(new Date().getTime()));
} else {
String distDate = getDistributionDate();
if (distDate != null) {
if (!StringUtil.isEmpty(str)) {
str += ", ";
}
str += distDate;
}
}
} else {
if (!StringUtil.isEmpty(str)) {
str += ", ";
}
str += new SimpleDateFormat("yyyy").format(new Timestamp(this.getDataset().getPublicationDate().getTime()));
}
} else {
if (!StringUtil.isEmpty(str)) {
str += ", ";
}
str += new SimpleDateFormat("yyyy").format(new Timestamp(this.getDataset().getPublicationDate().getTime()));
}

if (this.getTitle() != null) {
if (!StringUtil.isEmpty(this.getTitle())) {
if (!StringUtil.isEmpty(str)) {
@@ -780,6 +794,31 @@ public String getCitation(boolean isOnlineVersion) {
}*/
return str;
}

private Date getCitationDate() {
DatasetField citationDate = getDatasetField(this.getDataset().getCitationDateDatasetFieldType());
if (citationDate != null && citationDate.getDatasetFieldType().getFieldType().equals(FieldType.DATE)){
try {
return new SimpleDateFormat("yyyy").parse( citationDate.getValue() );
} catch (ParseException ex) {
Logger.getLogger(DatasetVersion.class.getName()).log(Level.SEVERE, null, ex);
}
}

return null;
}

public DatasetField getDatasetField(DatasetFieldType dsfType) {
if (dsfType != null) {
for (DatasetField dsf : this.getFlatDatasetFields()) {
if (dsf.getDatasetFieldType().equals(dsfType)) {
return dsf;
}
}
}
return null;

}

public String getDistributionDate() {
//todo get dist date from datasetfieldvalue table
42 changes: 38 additions & 4 deletions src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
@@ -31,14 +31,19 @@
import edu.harvard.iq.dataverse.util.json.JsonParser;
import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
import edu.harvard.iq.dataverse.validation.BeanValidationServiceBean;
import java.io.StringReader;
import java.net.URI;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonReader;
import javax.json.JsonValue;
import javax.json.JsonValue.ValueType;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.servlet.http.HttpServletRequest;
@@ -83,10 +88,39 @@ public Response getResponse() {
*/
public Response refineResponse( String message ) {
final Status statusCode = Response.Status.fromStatusCode(response.getStatus());
final Throwable cause = getCause();
return errorResponse(statusCode, message
+ (cause!=null ? " "+cause.getMessage() : "")
+ " (" + statusCode.toString() + ")" );
String baseMessage = getWrappedMessageWhenJson();

if ( baseMessage == null ) {
final Throwable cause = getCause();
baseMessage = (cause!=null ? cause.getMessage() : "");
}
return errorResponse(statusCode, message+" "+baseMessage);
}

/**
* In the common case of the wrapped response being of type JSON,
* return the message field it has (if any).
* @return the content of a message field, or {@code null}.
*/
String getWrappedMessageWhenJson() {
if ( response.getMediaType().equals(MediaType.APPLICATION_JSON_TYPE) ) {
Object entity = response.getEntity();
if ( entity == null ) return null;

String json = entity.toString();
try ( StringReader rdr = new StringReader(json) ){
JsonReader jrdr = Json.createReader(rdr);
JsonObject obj = jrdr.readObject();
if ( obj.containsKey("message") ) {
JsonValue message = obj.get("message");
return message.getValueType() == ValueType.STRING ? obj.getString("message") : message.toString();
} else {
return null;
}
}
} else {
return null;
}
}
}

39 changes: 38 additions & 1 deletion src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -3,6 +3,7 @@
import edu.harvard.iq.dataverse.DOIEZIdServiceBean;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetField;
import edu.harvard.iq.dataverse.DatasetFieldType;
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.Dataverse;
@@ -22,6 +23,7 @@
import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.ListVersionsCommand;
import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.SetDatasetCitationDateCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetTargetURLCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
@@ -124,7 +126,42 @@ public Response destroyDataset( @PathParam("id") Long id) {
} catch (WrappedResponse ex) {
return ex.refineResponse( "Failed to destroy dataset " + id );
}
}
}

@PUT
@Path("{id}/citationdate")
public Response setCitationDate( @PathParam("id") Long id, String dsfTypeName) {
try {
if ( dsfTypeName.trim().isEmpty() ){
throw new WrappedResponse( badRequest("Please provide a dataset field type in the request body.") );
}
DatasetFieldType dsfType = null;
if (!":publicationDate".equals(dsfTypeName)) {
dsfType = datasetFieldSvc.findByName(dsfTypeName);
if (dsfType == null) {
throw new WrappedResponse( badRequest("Dataset Field Type Name " + dsfTypeName + " not found.") );
}
}

execCommand(new SetDatasetCitationDateCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(id), dsfType));

return okResponse("Citation Date for dataset " + id + " set to: " + (dsfType != null ? dsfType.getDisplayName() : "default"));

} catch (WrappedResponse ex) {
return ex.refineResponse("Unable to set citation date for dataset " + id + ".");
}
}

@DELETE
@Path("{id}/citationdate")
public Response useDefaultCitationDate( @PathParam("id") Long id) {
try {
execCommand(new SetDatasetCitationDateCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(id), null));
return okResponse("Citation Date for dataset " + id + " set to default");
} catch (WrappedResponse ex) {
return ex.refineResponse("Unable to restore default citation date for dataset " + id + ".");
}
}

@GET
@Path("{id}/versions")
45 changes: 45 additions & 0 deletions src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetDatasetCitationDateCommand.java
@@ -0,0 +1,45 @@
package edu.harvard.iq.dataverse.engine.command.impl;

import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetFieldType;
import edu.harvard.iq.dataverse.DatasetFieldType.FieldType;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;

/**
*
* @author gdurand
*/
@RequiredPermissions( Permission.PublishDataset )
public class SetDatasetCitationDateCommand extends AbstractCommand<Dataset>{


private final DatasetFieldType dsfType;
private final Dataset dataset;

public SetDatasetCitationDateCommand( DataverseRequest aRequest, Dataset dataset, DatasetFieldType dsfType ) {
super( aRequest, dataset );
this.dataset = dataset;
this.dsfType = dsfType;
}

@Override
public Dataset execute(CommandContext ctxt) throws CommandException {
if ( dsfType == null || dsfType.getFieldType().equals(FieldType.DATE) ) {
dataset.setCitationDateDatasetFieldType(dsfType);
} else {
throw new IllegalCommandException("Provided DatasetFieldType is not a Date", this);
}

Dataset savedDataset = ctxt.datasets().merge(dataset);
ctxt.index().indexDataset(savedDataset, false);
return savedDataset;
}

}
