AriadnePlus/dnet-ariadneplus/src/main/java/eu/dnetlib/ariadneplus/workflows/nodes/EnrichGraphDBContentJobNode...

package eu.dnetlib.ariadneplus.workflows.nodes;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.gson.Gson;

import eu.dnetlib.enabling.resultset.client.ResultSetClient;
import eu.dnetlib.msro.workflows.graph.Arc;
import eu.dnetlib.msro.workflows.nodes.AsyncJobNode;
import eu.dnetlib.msro.workflows.procs.Env;
import eu.dnetlib.msro.workflows.procs.Token;
import eu.dnetlib.msro.workflows.util.ResultsetProgressProvider;
import eu.dnetlib.msro.workflows.util.WorkflowsConstants;
import eu.dnetlib.rmi.common.ResultSet;
import eu.dnetlib.rmi.manager.MSROException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicNameValuePair;

import org.springframework.beans.factory.annotation.Autowired;

import java.io.IOException;
import java.net.ConnectException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
public class EnrichGraphDBContentJobNode extends AsyncJobNode {

	private static final Log log = LogFactory.getLog(EnrichGraphDBContentJobNode.class);

	private String eprParam;

	@Autowired
	private ResultSetClient resultSetClient;

	private String sparqlUpdateQuery;
	private String publisherEndpoint;
	private String datasourceInterface;
	private String datasource;

	// for parallel requests to the publisher endpoint
	private int nThreads = 5;
	@Override
	protected String execute(final Env env) throws Exception {
		int statusCode = -1;
		String enrichResult = "noResult";

		log.info("Publisher endpoint: " + getPublisherEndpoint());
		log.info("Enrich Query Value: " + getSparqlUpdateQuery());

		PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
		cm.setMaxTotal(nThreads);
		CloseableHttpClient client = HttpClients.custom().setConnectionManager(cm).build();

		log.info("Enrich endpoint: " + getEnrichEndpoint());
		CloseableHttpResponse responsePOST = null;
		try {
			// POST the SPARQL update query as the raw request body to the publisher's /updateSparql endpoint
			HttpPost post = new HttpPost(getEnrichEndpoint());
			List<NameValuePair> params = Lists.newArrayList();           // currently unused
			String datasourceInterfaceValue = getDatasourceInterface();  // currently unused
			StringEntity entity = new StringEntity(getSparqlUpdateQuery());
			post.setEntity(entity);
			responsePOST = client.execute(post);
			statusCode = responsePOST.getStatusLine().getStatusCode();
			switch (statusCode) {
			case 200:
				log.info("enrich graphDB content completed");
				break;
			default:
				log.error("error enriching graphDB " + responsePOST.getStatusLine().getStatusCode() + ": "
						+ responsePOST.getStatusLine().getReasonPhrase());
				break;
			}
		} catch (ConnectException ce) {
			throw new MSROException("unable to connect to Publisher endpoint: " + getEnrichEndpoint());
		} catch (IOException e) {
			log.error("IO error enriching graphDB ", e);
		} finally {
			if (responsePOST != null) responsePOST.close();
			client.close();
			cm.shutdown();
		}

		env.setAttribute(WorkflowsConstants.MAIN_LOG_PREFIX + "statusCode", Integer.toString(statusCode));
		env.setAttribute(WorkflowsConstants.MAIN_LOG_PREFIX + "enrichResult", enrichResult);
		log.info("enriching completed");
		return Arc.DEFAULT_ARC;
	}
	public String getPublisherEndpoint() {
		return publisherEndpoint;
	}

	private String getEnrichEndpoint() {
		return publisherEndpoint.concat("/updateSparql");
	}

	public void setPublisherEndpoint(final String publisherEndpoint) {
		this.publisherEndpoint = publisherEndpoint;
	}

	public ResultSetClient getResultSetClient() {
		return resultSetClient;
	}

	public void setResultSetClient(final ResultSetClient resultSetClient) {
		this.resultSetClient = resultSetClient;
	}

	public String getEprParam() {
		return eprParam;
	}

	public void setEprParam(String eprParam) {
		this.eprParam = eprParam;
	}

	public String getDatasourceInterface() {
		return datasourceInterface;
	}

	public void setDatasourceInterface(String datasourceInterface) {
		this.datasourceInterface = datasourceInterface;
	}

	@Override
	protected void beforeStart(Token token) {
		token.setProgressProvider(new ResultsetProgressProvider(token.getEnv().getAttribute(getEprParam(), ResultSet.class), this.resultSetClient));
	}

	public String getDatasource() {
		return datasource;
	}

	public void setDatasource(String datasource) {
		this.datasource = datasource;
	}

	public String getSparqlUpdateQuery() {
		return sparqlUpdateQuery;
	}

	public void setSparqlUpdateQuery(String sparqlUpdateQuery) {
		this.sparqlUpdateQuery = sparqlUpdateQuery;
	}
}
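
The ExecutorService, Executors, and Future imports and the nThreads field are not used by the class above, although the "for parallel requests to the publisher endpoint" comment and cm.setMaxTotal(nThreads) suggest that parallel POSTs to /updateSparql were intended. The following standalone sketch only illustrates how such parallel requests could be issued with the same HttpClient setup; the class name ParallelEnrichSketch, the endpoint URL, the sample queries, and the postUpdate helper are hypothetical and not part of the repository.

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

// Illustrative sketch only: parallel SPARQL updates against a hypothetical publisher endpoint.
public class ParallelEnrichSketch {

	public static void main(String[] args) throws Exception {
		final int nThreads = 5;
		final String enrichEndpoint = "http://localhost:8080/updateSparql"; // hypothetical endpoint

		// Size the connection pool and the thread pool consistently,
		// mirroring cm.setMaxTotal(nThreads) in the job node.
		PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
		cm.setMaxTotal(nThreads);
		CloseableHttpClient client = HttpClients.custom().setConnectionManager(cm).build();
		ExecutorService executor = Executors.newFixedThreadPool(nThreads);

		// Hypothetical sample updates, e.g. one per named graph.
		List<String> queries = List.of(
				"INSERT DATA { <urn:a> <urn:p> \"1\" }",
				"INSERT DATA { <urn:b> <urn:p> \"2\" }");

		try {
			List<Future<Integer>> futures = new ArrayList<>();
			for (String query : queries) {
				futures.add(executor.submit(() -> postUpdate(client, enrichEndpoint, query)));
			}
			for (Future<Integer> f : futures) {
				System.out.println("status: " + f.get()); // surfaces failures of individual POSTs
			}
		} finally {
			executor.shutdown();
			client.close();
			cm.shutdown();
		}
	}

	// One SPARQL update per request, same pattern as execute(): the body is the raw query string.
	private static int postUpdate(CloseableHttpClient client, String endpoint, String query) throws Exception {
		HttpPost post = new HttpPost(endpoint);
		post.setEntity(new StringEntity(query));
		try (CloseableHttpResponse response = client.execute(post)) {
			return response.getStatusLine().getStatusCode();
		}
	}
}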