Added REST method to execute SPARQL INSERT queries on GraphDB

Enrico Ottonello 2020-05-27 23:07:08 +02:00
parent d23f08f750
commit 739dcc6b82
3 changed files with 73 additions and 53 deletions


@@ -10,6 +10,7 @@ import org.eclipse.rdf4j.RDF4JException;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.query.*;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.manager.RemoteRepositoryManager;
@@ -239,53 +240,45 @@ public class GraphDBClient {
    public void setRepository(String repository) {
        this.repository = repository;
    }

    public String executeSparql(final String queryValue) throws AriadnePlusPublisherException {
        try {
            String result = "";
            log.debug("init connection to graphDBServerUrl " + this.graphDBServerUrl);
            RemoteRepositoryManager manager = new RemoteRepositoryManager(this.graphDBServerUrl);
            manager.init();
            manager.setUsernameAndPassword(getWriterUser(), getWriterPwd());
            log.debug("manager init");
            Repository repository = manager.getRepository(getRepository());
            try (RepositoryConnection con = repository.getConnection()) {
                log.debug("connection established");
                // run the SPARQL update inside a single transaction
                con.begin();
                Update updateResultQuery = con.prepareUpdate(queryValue);
                if (updateResultQuery != null) {
                    updateResultQuery.execute();
                    result = "updated";
                } else {
                    result = "No result.";
                }
                log.debug("query result: " + result);
                con.commit();
                log.debug("query executed");
            } catch (RDF4JException e) {
                log.error("error executing query ...", e);
            }
            repository.shutDown();
            manager.shutDown();
            log.debug("manager shutDown");
            return result;
        } catch (Throwable e) {
            log.error(e);
            throw new AriadnePlusPublisherException(e);
        }
    }
}
//
//    strQuery =
//        "SELECT ?name FROM DEFAULT WHERE {" +
//        "?s <http://xmlns.com/foaf/0.1/name> ?name .}";
//    }
//
//    public void queryTest() {
//        RemoteRepositoryManager manager = new RemoteRepositoryManager(GRAPHDB_SERVER_URL);
//        manager.init();
//        logger.debug("manager init");
//        Repository repository = manager.getRepository("PersonData");
//        try (RepositoryConnection con = repository.getConnection()) {
//            logger.debug("connection established");
//            query(con);
//            logger.debug("query success");
//        }
//        catch (RDF4JException e) {
//            logger.error("error adding statement ...", e);
//        }
//        manager.shutDown();
//        logger.debug("manager shutDown");
//    }
//
//    private void query(RepositoryConnection repositoryConnection) {
//        TupleQuery tupleQuery = repositoryConnection.prepareTupleQuery(QueryLanguage.SPARQL, strQuery);
//        TupleQueryResult result = null;
//        try {
//            result = tupleQuery.evaluate();
//            int count = 0;
//            while (result.hasNext()) {
//                BindingSet bindingSet = result.next();
//
//                SimpleLiteral name = (SimpleLiteral) bindingSet.getValue("name");
//                logger.info("name = " + name.stringValue());
//                count++;
//            }
//            logger.info("Entries found: ", count);
//        }
//        catch (QueryEvaluationException qee) {
//            logger.error(WTF_MARKER, qee.getStackTrace().toString(), qee);
//        } finally {
//            result.close();
//        }
//    }
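For reference, the commented-out test code removed above ran a hand-rolled SELECT against the repository. Below is a minimal, self-contained RDF4J sketch of the update-then-verify flow that the new executeSparql method performs; the server URL, credentials, repository name and example triple are placeholders, not values taken from this commit.

import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.query.Update;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.manager.RemoteRepositoryManager;

public class SparqlUpdateSketch {
    public static void main(String[] args) {
        // Placeholder connection details: adjust to the target GraphDB instance.
        RemoteRepositoryManager manager = new RemoteRepositoryManager("http://localhost:7200");
        manager.setUsernameAndPassword("writer", "secret");
        manager.init();
        Repository repository = manager.getRepository("ariadneplus");
        try (RepositoryConnection con = repository.getConnection()) {
            // run the update in one transaction, as the new method does
            con.begin();
            Update update = con.prepareUpdate(
                "INSERT DATA { <http://example.org/s> <http://example.org/p> \"example\" }");
            update.execute();
            con.commit();

            // verify the inserted triple with a SELECT
            TupleQuery select = con.prepareTupleQuery(
                "SELECT ?o WHERE { <http://example.org/s> <http://example.org/p> ?o }");
            try (TupleQueryResult result = select.evaluate()) {
                while (result.hasNext()) {
                    System.out.println(result.next().getValue("o").stringValue());
                }
            }
        } finally {
            repository.shutDown();
            manager.shutDown();
        }
    }
}

Shutting down the repository and the manager at the end mirrors the cleanup the new method performs after each call.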


@@ -3,10 +3,7 @@ package eu.dnetlib.ariadneplus.publisher;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.bind.annotation.*;
import eu.dnetlib.ariadneplus.publisher.AriadnePlusPublisherHelper.AriadnePlusTargets;
@@ -70,4 +67,17 @@ public class AriadnePlusPublisherController {
    public void setAriadnePlusPublisherHelper(final AriadnePlusPublisherHelper ariadneplusPublisherHelper) {
        this.ariadneplusPublisherHelper = ariadneplusPublisherHelper;
    }

    @RequestMapping(value = "/executeSparql", method = RequestMethod.POST)
    public String executeSparql(@RequestBody final String queryValue) throws AriadnePlusPublisherException {
        // Example update, kept for reference:
        // queryValue = "PREFIX aocat: <https://www.ariadne-infrastructure.eu/resource/ao/cat/1.1/>\n" +
        //     " PREFIX skos:<http://www.w3.org/2004/02/skos/core#>\n" +
        //     " INSERT { GRAPH <https://ariadne-infrastructure.eu/api_________::ariadne_plus::ads::271> { <https://ariadne-infrastructure.eu/aocat/Resource/0D02D6C0-E687-342E-891D-82B39A880F4E> aocat:has_title \" inserito da controller rest\" } }\n" +
        //     " WHERE{\n" +
        //     " GRAPH <https://ariadne-infrastructure.eu/api_________::ariadne_plus::ads::271> {\n" +
        //     " { <https://ariadne-infrastructure.eu/aocat/Resource/0D02D6C0-E687-342E-891D-82B39A880F4E> aocat:has_title ?title } .\n" +
        //     " }\n" +
        //     " };";
        return getAriadnePlusPublisherHelper().executeSparql(queryValue, getTarget(DEFAULT_TARGET_ENDPOINT));
    }
}
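A sketch of how the new endpoint might be called once the publisher is running. The /executeSparql path, the POST method and the plain-string body follow from the mapping above; the host, port, context path and Content-Type header are assumptions.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ExecuteSparqlClientSketch {
    public static void main(String[] args) throws Exception {
        // SPARQL update sent as the raw request body, matching the @RequestBody String parameter.
        String update =
            "PREFIX aocat: <https://www.ariadne-infrastructure.eu/resource/ao/cat/1.1/> " +
            "INSERT DATA { GRAPH <http://example.org/test-graph> { " +
            "<http://example.org/record/1> aocat:has_title \"test title\" } }";

        // Host, port and context path are placeholders.
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/executeSparql"))
                .header("Content-Type", "text/plain")
                .POST(HttpRequest.BodyPublishers.ofString(update))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // On success the GraphDB client is expected to report "updated".
        System.out.println(response.statusCode() + " " + response.body());
    }
}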


@@ -66,6 +66,17 @@ public class AriadnePlusPublisherHelper {
        return res;
    }

    public String executeSparql(final String queryValue, final AriadnePlusTargets target) throws AriadnePlusPublisherException {
        String res;
        switch (target) {
        case GRAPHDB:
            res = executeSparqlGraphDB(queryValue);
            break;
        default:
            throw new AriadnePlusPublisherException("Target " + target + " not supported yet");
        }
        return res;
    }
    private void publishGraphDB(final String record) throws AriadnePlusPublisherException {
        log.debug("Publishing on graphdb");
        GraphDBClient graphDBClient = this.graphdbClientFactory.getGraphDBClient();
@@ -90,4 +101,10 @@
        return 0;
    }

    private String executeSparqlGraphDB(final String queryValue) throws AriadnePlusPublisherException {
        log.info("executeSparqlGraphDB " + queryValue);
        GraphDBClient graphDBClient = this.graphdbClientFactory.getGraphDBClient();
        return graphDBClient.executeSparql(queryValue);
    }
}