package eu.dnetlib.ariadneplus.workflows.nodes;

import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.gson.Gson;
import eu.dnetlib.enabling.resultset.client.ResultSetClient;
import eu.dnetlib.msro.workflows.graph.Arc;
import eu.dnetlib.msro.workflows.nodes.AsyncJobNode;
import eu.dnetlib.msro.workflows.procs.Env;
import eu.dnetlib.msro.workflows.procs.Token;
import eu.dnetlib.msro.workflows.util.ResultsetProgressProvider;
import eu.dnetlib.msro.workflows.util.WorkflowsConstants;
import eu.dnetlib.rmi.common.ResultSet;
import eu.dnetlib.rmi.manager.MSROException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicNameValuePair;
import org.springframework.beans.factory.annotation.Autowired;

import java.io.IOException;
import java.net.ConnectException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

/**
 * Workflow node that enriches the GraphDB content by posting SPARQL update
 * queries to the publisher endpoint. The configured query string is split on
 * ';' and each non-blank fragment is submitted as a separate POST request.
 */
public class EnrichGraphDBContentJobNode extends AsyncJobNode {

	private static final Log log = LogFactory.getLog(EnrichGraphDBContentJobNode.class);

	private String sparqlUpdateQuery;
	private String publisherEndpoint;
	private String datasourceInterface;
	private String datasource;

	// for parallel requests to the publisher endpoint
	private int nThreads = 1;

	@Override
	protected String execute(final Env env) throws Exception {
		final String endpoint = getEnrichEndpoint();
		log.info("Enrich endpoint: " + endpoint);

		int countQueries = 0;
		int countSuccess = 0;

		final PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
		cm.setMaxTotal(nThreads);

		// try-with-resources guarantees the client is closed and the pool is
		// shut down even when a failing query aborts the loop with an exception
		try (CloseableHttpClient client = HttpClients.custom().setConnectionManager(cm).build()) {
			for (String query : Splitter.on(";").split(getSparqlUpdateQuery())) {
				if (StringUtils.isNotBlank(query)) {
					countQueries++;
					final HttpPost post = new HttpPost(endpoint);
					post.setEntity(new StringEntity(query));
					// close each response before the next request so the leased
					// connection is returned to the pool (whose size is nThreads)
					try (CloseableHttpResponse response = client.execute(post)) {
						final int statusCode = response.getStatusLine().getStatusCode();
						if (statusCode == 200) {
							log.info(String.format("Query %d executed: %s", countQueries, query));
						} else {
							log.error("error enriching graphDB " + statusCode + ": "
									+ response.getStatusLine().getReasonPhrase());
							throw new MSROException(String.format("Cannot execute sparql from %s", query));
						}
					}
					countSuccess++;
				}
			}
		} finally {
			cm.shutdown();
		}

		final String enrichResult = String.format("Queries committed with success %d/%d", countSuccess, countQueries);
		log.info(enrichResult);
		env.setAttribute(WorkflowsConstants.MAIN_LOG_PREFIX + "enrichResult", enrichResult);
		return Arc.DEFAULT_ARC;
	}

	public String getPublisherEndpoint() {
		return publisherEndpoint;
	}

	private String getEnrichEndpoint() {
		return publisherEndpoint.concat("/updateSparql");
	}

	public void setPublisherEndpoint(final String publisherEndpoint) {
		this.publisherEndpoint = publisherEndpoint;
	}

	public String getDatasourceInterface() {
		return datasourceInterface;
	}

	public void setDatasourceInterface(String datasourceInterface) {
		this.datasourceInterface = datasourceInterface;
	}

	public String getDatasource() {
		return datasource;
	}

	public void setDatasource(String datasource) {
		this.datasource = datasource;
	}

	public String getSparqlUpdateQuery() {
		return sparqlUpdateQuery;
	}

	public void setSparqlUpdateQuery(String sparqlUpdateQuery) {
		this.sparqlUpdateQuery = sparqlUpdateQuery;
	}
}