sparqlGraph: use N_TRIPLES as the resultType for all CONSTRUCT queries.
Paul Cuddihy committed Apr 18, 2023
1 parent eba33bc commit 2425815
Showing 10 changed files with 402 additions and 95 deletions.
@@ -620,6 +620,19 @@ public String executeQueryToRdf(String query) throws Exception {
return (String) res.getResult(SparqlResultTypes.RDF.toString());
}

/**
* Execute query to N-triples
* WARNING: see INTERNAL USE notes at top of this file
* @param query - SPARQL CONSTRUCT query
* @return results as N-triples text
* @throws Exception
*/
public String executeQueryToNtriples(String query) throws Exception {
SimpleResultSet res = (SimpleResultSet) this.executeQueryAndBuildResultSet(query, SparqlResultTypes.N_TRIPLES);
res.throwExceptionIfUnsuccessful();
return (String) res.getResult(SparqlResultTypes.N_TRIPLES.toString());
}

/**
* Execute query to table
* WARNING: see INTERNAL USE notes at top of this file
@@ -1038,13 +1038,14 @@ public JSONObject constructConnectedData(@RequestBody ConstructConnectedDataRequ
ResultsClient resClient = results_prop.getClient();

ConnectedDataConstructor constructor = new ConnectedDataConstructor(
requestBody.getInstanceVal(), requestBody.buildInstanceType(),
requestBody.getInstanceVal(), requestBody.buildInstanceType(), requestBody.getResultType(),
conn, this.retrieveOInfo(conn), getJobTracker(), resClient);

constructor.start();

retval.setSuccess(true);
retval.addResult(SimpleResultSet.JOB_ID_RESULT_KEY, constructor.getJobId());
retval.addResult(SimpleResultSet.RESULT_TYPE_KEY, requestBody.getResultType().toString());

} catch(Exception e){
retval.setSuccess(false);
@@ -16,6 +16,7 @@
*/

package com.ge.research.semtk.services.nodeGroupExecution.requests;
import com.ge.research.semtk.sparqlX.SparqlResultTypes;
import com.ge.research.semtk.sparqlX.XSDSupportedType;
import com.ge.research.semtk.springutilib.requests.SparqlConnectionRequest;

@@ -24,18 +25,25 @@

public class ConstructConnectedDataRequest extends SparqlConnectionRequest {

@Schema(
required = true,
@Schema(requiredMode = Schema.RequiredMode.NOT_REQUIRED,
example = "http://path#this")
private String instanceVal;

@Schema(
required = false,
@Schema(requiredMode = Schema.RequiredMode.NOT_REQUIRED,
example = "node_uri")
private String instanceType = null;

@Schema(requiredMode = Schema.RequiredMode.NOT_REQUIRED,
description = "Defaults to json-ld",
example = "N_TRIPLES")
public SparqlResultTypes resultType = SparqlResultTypes.GRAPH_JSONLD;


public String getInstanceVal() {
public SparqlResultTypes getResultType() {
return resultType;
}

public String getInstanceVal() {
return instanceVal;
}

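For reference, a request body for this endpoint with the new resultType field could be built the way msiclientnodegroupexec.js builds it. A sketch only: the field values reuse the @Schema examples above, conn is assumed to be an existing SparqlConnection in the caller's scope, and omitting resultType falls back to the GRAPH_JSONLD default.

var constructConnectedDataBody = JSON.stringify({
    "instanceVal": "http://path#this",        // example URI from the @Schema annotation
    "instanceType": "node_uri",               // optional instance type
    "resultType": "N_TRIPLES",                // optional; defaults to GRAPH_JSONLD (json-ld)
    "conn": JSON.stringify(conn.toJson())     // serialized SparqlConnection, assumed in scope
});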
@@ -9,13 +9,15 @@
import com.ge.research.semtk.edc.client.ResultsClient;
import com.ge.research.semtk.sparqlToXLib.SparqlToXLibUtil;
import com.ge.research.semtk.sparqlX.SparqlConnection;
import com.ge.research.semtk.sparqlX.SparqlResultTypes;
import com.ge.research.semtk.sparqlX.XSDSupportedType;
import com.ge.research.semtk.utility.LocalLogger;

public class ConnectedDataConstructor extends Thread {

private String instanceVal = null;
private XSDSupportedType instanceType = null;
private SparqlResultTypes resultType = SparqlResultTypes.GRAPH_JSONLD;
private SparqlConnection conn = null;
private OntologyInfo oInfo = null;
private JobTracker tracker = null;
@@ -34,10 +36,15 @@ public class ConnectedDataConstructor {
* @param resClient - "
* @throws Exception
*/
public ConnectedDataConstructor(String instanceVal, XSDSupportedType instanceType, SparqlConnection conn, OntologyInfo oInfo, JobTracker tracker, ResultsClient resClient) throws Exception {
public ConnectedDataConstructor(String instanceVal, XSDSupportedType instanceType, SparqlResultTypes resultType, SparqlConnection conn, OntologyInfo oInfo, JobTracker tracker, ResultsClient resClient) throws Exception {

this.instanceVal = instanceVal;
this.instanceType = instanceType;
this.resultType = resultType;

if (resultType != SparqlResultTypes.GRAPH_JSONLD && resultType != SparqlResultTypes.N_TRIPLES)
throw new Exception("Unsupported result type: " + resultType.toString());

this.conn = conn;
this.oInfo = oInfo;
this.tracker = tracker;
@@ -49,27 +56,14 @@ public ConnectedDataConstructor(String instanceVal, XSDSupportedType instanceTyp
this.resClient = resClient;
}

public ConnectedDataConstructor(String instanceVal, XSDSupportedType instanceType, SparqlConnection conn) throws Exception {
this(instanceVal, instanceType, conn, null, null, null);
public ConnectedDataConstructor(String instanceVal, XSDSupportedType instanceType, SparqlResultTypes resultType, SparqlConnection conn) throws Exception {
this(instanceVal, instanceType, resultType, conn, null, null, null);
}

public String getJobId() {
return this.jobId;
}
public JSONObject queryJsonLd() throws Exception {
JSONObject ret = null;

if (this.oInfo == null) {
this.oInfo = new OntologyInfo(this.conn);
}
String sparql = SparqlToXLibUtil.generateConstructConnected(this.conn, this.oInfo, this.instanceVal, this.instanceType);
ret = this.conn.getDefaultQueryInterface().executeQueryToJsonLd(sparql);

// TODO: use oInfo to clean stuff out

return ret;
}


public void run() {

try {
@@ -80,10 +74,21 @@ public void run() {

this.tracker.setJobPercentComplete(this.jobId, 10, "Querying data");

JSONObject jObj = this.queryJsonLd();

tracker.setJobPercentComplete(this.jobId, 90, "Storing results");
resClient.execStoreGraphResults(this.jobId, jObj);

if (this.oInfo == null) {
this.oInfo = new OntologyInfo(this.conn);
}
String sparql = SparqlToXLibUtil.generateConstructConnected(this.conn, this.oInfo, this.instanceVal, this.instanceType);

if (this.resultType == SparqlResultTypes.GRAPH_JSONLD) {
resClient.execStoreGraphResults(this.jobId, this.conn.getDefaultQueryInterface().executeQueryToJsonLd(sparql));

} else if (this.resultType == SparqlResultTypes.N_TRIPLES) {
JSONObject jObj = new JSONObject();
jObj.put(SparqlResultTypes.N_TRIPLES.toString(), this.conn.getDefaultQueryInterface().executeQueryToNtriples(sparql));
resClient.execStoreGraphResults(this.jobId, jObj);
}

this.tracker.setJobSuccess(this.jobId);

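To make the new branch concrete: for N_TRIPLES the constructor stores a JSON object keyed by the result type name, whose value is the raw n-triples text that the web client's result set later reads back. A sketch of that stored shape, with purely illustrative triples:

// illustrative shape of the stored graph result when resultType = N_TRIPLES
var storedGraphResult = {
    "N_TRIPLES": "<http://path#this> <http://path#linksTo> <http://path#other> .\n" +
                 "<http://path#other> <http://path#hasValue> \"42\" ."
};
// for GRAPH_JSONLD the stored object is the json-ld graph itself, as before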
1 change: 1 addition & 0 deletions sparqlGraphWeb/sparqlGraph/js/belmont.js
@@ -1829,6 +1829,7 @@ SemanticNodeGroup.RT_JSONLD = "GRAPH_JSONLD";
SemanticNodeGroup.RT_CONFIRM = "CONFIRM";
SemanticNodeGroup.RT_OWLRDF = "RDF";
SemanticNodeGroup.RT_HTML = "HTML";
SemanticNodeGroup.RT_NTRIPLES = "N_TRIPLES";

SemanticNodeGroup.FUNCTION_MIN = 0;
SemanticNodeGroup.FUNCTION_MAX = 1;
31 changes: 22 additions & 9 deletions sparqlGraphWeb/sparqlGraph/js/exploretab.js
@@ -763,10 +763,10 @@ define([ // properly require.config'ed
IIDXHelper.progressBarCreate(this.progressDiv, "progress-info progress-striped active");
IIDXHelper.progressBarSetPercent(this.progressDiv, 0, "");
this.busy(true);
client.execAsyncDispatchRawSparql(sparql, gConn, jsonLdCallback, failureCallback, "GRAPH_JSONLD");
client.execAsyncDispatchRawSparql(sparql, gConn, jsonLdCallback, failureCallback, SemanticNodeGroup.RT_NTRIPLES);
},

drawOntologyDetailCallback : function (jsonLdResults) {
drawOntologyDetailCallback : function (res) {
IIDXHelper.progressBarSetPercent(this.progressDiv, 90);

var network = this.networkHash[ExploreTab.MODE_ONTOLOGY_DETAIL];
@@ -776,16 +776,29 @@ define([ // properly require.config'ed
network.deleteSelected();

// add new
var jsonLd = jsonLdResults.getGraphResultsJsonArr(false, false, false);
var edgeList = [];
var nodeDict = {};
for (var i=0; i < jsonLd.length; i++) {
VisJsHelper.addJsonLdObject(jsonLd[i], nodeDict, edgeList, true);
if (i % 20 == 0) {
network.body.data.nodes.update(Object.values(nodeDict));
network.body.data.edges.update(edgeList);
}

if (res.isJsonLdResults()) {
var jsonLd = res.getGraphResultsJsonArr(false, false, false);
for (var i=0; i < jsonLd.length; i++) {
VisJsHelper.addJsonLdObject(jsonLd[i], nodeDict, edgeList, true);
if (i % 20 == 0) {
network.body.data.nodes.update(Object.values(nodeDict));
network.body.data.edges.update(edgeList);
}
}
} else if (res.isNtriplesResults()) {
var triples = res.getNtriplesArray();
for (var i=0; i < triples.length; i++) {
VisJsHelper.addTriple(triples[i], nodeDict, edgeList, true, false);
if (i % 20 == 0) {
network.body.data.nodes.update(Object.values(nodeDict));
network.body.data.edges.update(edgeList);
}
}
}

network.body.data.nodes.update(Object.values(nodeDict));
network.body.data.edges.update(edgeList);

40 changes: 26 additions & 14 deletions sparqlGraphWeb/sparqlGraph/js/msiclientnodegroupexec.js
@@ -279,36 +279,47 @@ define([ // properly require.config'ed bootstrap-modal
** You can build a jobIdCallback with one of the
** MsiClientNodeGroupExec.build___Callback() functions
*/
/* deprecated: use execAsyncDispatchQueryFromNodeGroup */
execAsyncDispatchConstructFromNodeGroup : function(nodegroup, conn, edcConstraints, runtimeConstraints, jobIdCallback, failureCallback) {
this.runAsyncNodegroup("dispatchConstructFromNodegroup",
nodegroup, conn, edcConstraints, runtimeConstraints, jobIdCallback, failureCallback);
},

/* deprecated: use execAsyncDispatchQueryFromNodeGroup */
execAsyncDispatchSelectFromNodeGroup : function(nodegroup, conn, edcConstraints, runtimeConstraints, jobIdCallback, failureCallback) {
this.runAsyncNodegroup("dispatchSelectFromNodegroup",
nodegroup, conn, edcConstraints, runtimeConstraints, jobIdCallback, failureCallback);
},
execAsyncDispatchQueryFromNodeGroup : function(nodegroup, conn, queryType, edcConstraints, runtimeConstraints, jobIdCallback, failureCallback) {
var data = JSON.stringify ({
"jsonRenderedNodeGroup": JSON.stringify(nodegroup.toJson()),
"sparqlConnection": JSON.stringify(conn.toJson()),
"queryType" : queryType,
"runtimeConstraints": (typeof runtimeConstraints == "undefined" || runtimeConstraints == null) ? "" : JSON.stringify(runtimeConstraints.toJson()),
"externalDataConnectionConstraints": (typeof edcConstraints == "undefined" || edcConstraints == null) ? "" : JSON.stringify(edcConstraints.toJson())
});

this.runAsync("dispatchQueryFromNodegroup", data, jobIdCallback, failureCallback);
/* === latest: replaces the deprecated dispatch functions above ===
** execute a query of any type from a nodegroup
*/
execAsyncDispatchQueryFromNodeGroup : function(nodegroup, optConn, jobIdCallback, optFailureCallback, optQueryType, optResultType, optRuntimeConstraints, optEdcConstraints, optTargetId, optFlags) {

var data = {};
data["jsonRenderedNodeGroup"] = JSON.stringify(nodegroup.toJson());

if (optQueryType) data["queryType"] = optQueryType;
if (optResultType) data["resultType"] = optResultType;
if (optConn) data["sparqlConnection"] = JSON.stringify(optConn.toJson());
if (optFlags) data["flags"] = optFlags;
if (optTargetId) data["targetObjectSparqlId"] = optTargetId;
if (optRuntimeConstraints) data["runtimeConstraints"] = JSON.stringify(optRuntimeConstraints.toJson());
if (optEdcConstraints) data["externalDataConnectionConstraints"] = JSON.stringify(optEdcConstraints.toJson());

this.runAsync("dispatchQueryFromNodegroup", JSON.stringify(data), jobIdCallback, optFailureCallback);
},

/* deprecated: use execAsyncDispatchQueryFromNodeGroup */
execAsyncDispatchDeleteFromNodeGroup : function(nodegroup, conn, edcConstraints, runtimeConstraints, jobIdCallback, failureCallback) {
this.runAsyncNodegroup("dispatchDeleteFromNodegroup",
nodegroup, conn, edcConstraints, runtimeConstraints, jobIdCallback, failureCallback);
},

/* deprecated: use execAsyncDispatchQueryFromNodeGroup */
execAsyncDispatchCountFromNodeGroup : function(nodegroup, conn, edcConstraints, runtimeConstraints, jobIdCallback, failureCallback) {
this.runAsyncNodegroup("dispatchCountFromNodegroup",
nodegroup, conn, edcConstraints, runtimeConstraints, jobIdCallback, failureCallback);
},

/* deprecated: use execAsyncDispatchQueryFromNodeGroup */
execAsyncDispatchFilterFromNodeGroup : function(nodegroup, conn, sparqlId, edcConstraints, runtimeConstraints, jobIdCallback, failureCallback) {
this.runAsyncNodegroupSparqlId("dispatchFilterFromNodegroup",
nodegroup, sparqlId, conn, edcConstraints, runtimeConstraints, jobIdCallback, failureCallback);
@@ -352,10 +363,11 @@
sei, jobIdCallback, failureCallback);
},

execAsyncConstructConnectedData : function(instanceVal, instanceType, conn, jobIdCallback, failureCallback) {
execAsyncConstructConnectedData : function(instanceVal, instanceType, resultType, conn, jobIdCallback, failureCallback) {
var data = JSON.stringify ({
"instanceVal" : instanceVal,
"instanceType": instanceType,
"resultType": resultType,
"conn": JSON.stringify(conn.toJson())
});

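A usage sketch of the consolidated dispatcher and the updated connected-data call, assuming client, nodegroup, conn, jobIdCallback and failureCallback already exist in the caller's scope; the accepted queryType values are not shown in this diff, so optQueryType is left null here, and the instance values are illustrative:

// run a nodegroup query, requesting N-triples instead of the default result type
client.execAsyncDispatchQueryFromNodeGroup(
    nodegroup, conn,
    jobIdCallback, failureCallback,
    null,                               // optQueryType: not set in this sketch
    SemanticNodeGroup.RT_NTRIPLES);     // optResultType: "N_TRIPLES"

// construct connected data around an instance, passing the new resultType parameter
client.execAsyncConstructConnectedData(
    "http://path#this", "node_uri",     // instanceVal, instanceType
    SemanticNodeGroup.RT_NTRIPLES,      // resultType
    conn, jobIdCallback, failureCallback);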
18 changes: 18 additions & 0 deletions sparqlGraphWeb/sparqlGraph/js/msiresultset.js
@@ -62,6 +62,10 @@
isJsonLdResults : function () {
return this.xhr.hasOwnProperty("@graph") || this.xhr.hasOwnProperty("@id") || this.xhr.hasOwnProperty("@context") || JSON.stringify(this.xhr) === "{}";
},

isNtriplesResults : function () {
return this.xhr.hasOwnProperty("N_TRIPLES");
},

getColumnName : function (x) {
return this.getTable().col_names[x];
@@ -116,6 +120,20 @@
}
return graphArr;
},

getNtriplesText : function() {
return this.xhr["N_TRIPLES"];
},

getNtriplesArray : function() {
// parse each n-triples line into [subject, predicate, object], stripping the trailing " ."
let ret = [];
for (let line of this.getNtriplesText().split("\n") ) {
let f = line.replace(/\s*\.\s*$/, "").split(" ");
if (f[0] && f[1])
ret.push([ f[0], f[1], f.slice(2).join(" ") ]);
}
return ret;
},

// use json-ld @context to expand a json-ld array
expandJsonArrWithContext : function(jArr) {
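A consumer of these new accessors could mirror the branch added in exploretab.js above; a minimal sketch, assuming res is an MsiResultSet whose payload came back from a graph-results call:

if (res.isNtriplesResults()) {
    // each entry is [subject, predicate, object]; the object keeps any internal spaces
    var triples = res.getNtriplesArray();
    for (var i = 0; i < triples.length; i++) {
        console.log("s:", triples[i][0], "p:", triples[i][1], "o:", triples[i][2]);
    }
} else if (res.isJsonLdResults()) {
    var jsonLd = res.getGraphResultsJsonArr(false, false, false);
    // ... handle json-ld objects as before
}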