package eu.dnetlib.ariadneplus.workflows.nodes;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.gson.Gson;
import eu.dnetlib.enabling.resultset.client.ResultSetClient;
import eu.dnetlib.msro.workflows.graph.Arc;
import eu.dnetlib.msro.workflows.nodes.AsyncJobNode;
import eu.dnetlib.msro.workflows.procs.Env;
import eu.dnetlib.msro.workflows.procs.Token;
import eu.dnetlib.msro.workflows.util.ResultsetProgressProvider;
import eu.dnetlib.msro.workflows.util.WorkflowsConstants;
import eu.dnetlib.rmi.common.ResultSet;
import eu.dnetlib.rmi.manager.MSROException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicNameValuePair;
import org.springframework.beans.factory.annotation.Autowired;

import java.io.IOException;
import java.net.ConnectException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

/**
 * Workflow node that enriches GraphDB content by POSTing a SPARQL update
 * query to the publisher service's {@code /updateSparql} endpoint.
 *
 * <p>On HTTP 200 the workflow proceeds along the default arc; any other
 * status code, connection failure, or I/O error aborts the workflow with
 * an {@link MSROException}. The outcome (status code and result string)
 * is recorded in the workflow {@link Env} under the main-log prefix.
 */
public class EnrichGraphDBContentJobNode extends AsyncJobNode {

	private static final Log log = LogFactory.getLog(EnrichGraphDBContentJobNode.class);

	private String sparqlUpdateQuery;
	private String publisherEndpoint;
	private String datasourceInterface;
	private String datasource;

	// Cap on pooled HTTP connections toward the publisher endpoint
	// (named after the intended parallelism of publisher requests).
	private int nThreads = 5;

	/**
	 * Executes the enrichment: sends {@link #getSparqlUpdateQuery()} as the
	 * request body of a POST to the enrich endpoint and checks the response.
	 *
	 * @param env workflow environment; receives "statusCode" and
	 *            "enrichResult" attributes under the main-log prefix
	 * @return {@link Arc#DEFAULT_ARC} on HTTP 200
	 * @throws MSROException if the endpoint is unreachable, an I/O error
	 *                       occurs, or the response status is not 200
	 */
	@Override
	protected String execute(final Env env) throws Exception {
		int statusCode = -1;
		String enrichResult = "noResult";
		log.info("Publisher endpoint: " + getPublisherEndpoint());
		log.info("Enrich Query Value: " + getSparqlUpdateQuery());
		log.info("Enrich endpoint: " + getEnrichEndpoint());

		PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
		cm.setMaxTotal(nThreads);

		// try-with-resources guarantees the response and client are closed even
		// if an exception propagates (the original manual finally could mask
		// the primary exception if close() itself threw).
		try (CloseableHttpClient client = HttpClients.custom().setConnectionManager(cm).build()) {
			HttpPost post = new HttpPost(getEnrichEndpoint());
			// UTF-8 explicitly: the no-charset StringEntity constructor defaults
			// to ISO-8859-1, which would corrupt non-ASCII literals in the query.
			post.setEntity(new StringEntity(getSparqlUpdateQuery(), StandardCharsets.UTF_8));
			try (CloseableHttpResponse response = client.execute(post)) {
				statusCode = response.getStatusLine().getStatusCode();
				if (statusCode == 200) {
					log.info("enrich graphDB content completed");
				} else {
					log.error("error enriching graphDB " + statusCode
							+ ": " + response.getStatusLine().getReasonPhrase());
				}
			}
		} catch (ConnectException ce) {
			log.error(ce);
			throw new MSROException("Unable to connect to Publisher endpoint " + getEnrichEndpoint());
		} catch (IOException e) {
			log.error(e);
			throw new MSROException("IO Error " + getEnrichEndpoint());
		} finally {
			// Closing the client normally shuts the manager down too; calling
			// shutdown here keeps parity with the original code and is a no-op
			// when already closed.
			cm.shutdown();
		}

		env.setAttribute(WorkflowsConstants.MAIN_LOG_PREFIX + "statusCode", Integer.toString(statusCode));
		env.setAttribute(WorkflowsConstants.MAIN_LOG_PREFIX + "enrichResult", enrichResult);
		log.info(enrichResult);

		if (statusCode != 200) {
			throw new MSROException("Error from Publisher endpoint [ status code: " + statusCode + " ]");
		}
		return Arc.DEFAULT_ARC;
	}

	public String getPublisherEndpoint() {
		return publisherEndpoint;
	}

	/** @return the publisher endpoint with the SPARQL-update path appended */
	private String getEnrichEndpoint() {
		return publisherEndpoint.concat("/updateSparql");
	}

	public void setPublisherEndpoint(final String publisherEndpoint) {
		this.publisherEndpoint = publisherEndpoint;
	}

	public String getDatasourceInterface() {
		return datasourceInterface;
	}

	public void setDatasourceInterface(String datasourceInterface) {
		this.datasourceInterface = datasourceInterface;
	}

	public String getDatasource() {
		return datasource;
	}

	public void setDatasource(String datasource) {
		this.datasource = datasource;
	}

	public String getSparqlUpdateQuery() {
		return sparqlUpdateQuery;
	}

	public void setSparqlUpdateQuery(String sparqlUpdateQuery) {
		this.sparqlUpdateQuery = sparqlUpdateQuery;
	}
}