This commit is contained in:
Erik Perrone 2018-03-22 17:01:48 +00:00
parent c7f90167ee
commit 2754f320a5
6 changed files with 32 additions and 45 deletions

View File

@@ -55,33 +55,19 @@ public class NLPHub extends HttpServlet {
private void doWork(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
token = Constants.getToken(request, devMode);
try {
String dataMiner = request.getParameter("dataminer");
String[] algs = request.getParameter("algs").split(",");
for(int i=0; i<algs.length; i++) {
algs[i] = algs[i].trim();
}
RunnerCommander commander = new RunnerCommander(algs, request.getParameter("plink"), request.getParameter("annotations"), token,
RunnerCommander commander = new RunnerCommander(dataMiner, algs, request.getParameter("plink"), request.getParameter("annotations"), token,
response);
commander.setSleepTime(100);
commander.setMaxWaitingTime(90*1000);
commander.startProcess();
/*
if (algs.length >= 1) {
NlpNerRunner runner = new NlpNerRunner(service, algs, token, response);
runner.run(request.getParameter("plink"), request.getParameter("annotations"),
request.getParameter("lang"));
} else {
response.setContentType("application/json;charset=UTF-8");
try {
PrintWriter writer = response.getWriter();
writer.println(new JsonManager().getErrorJsonResponse("No algorithm identifiers given."));
} catch (Exception ex) {
logger.error(ex.getLocalizedMessage());
}
}*/
} catch (Exception x) {
x.printStackTrace();
}

View File

@@ -90,7 +90,7 @@ public class NLPUploader extends HttpServlet {
String sentence = NlpUtils.getLanguageRecognizerDigest(new String(content));
logger.info(sentence);
try {
NLpLanguageRecognizer.run(sentence, token, link, response);
NLpLanguageRecognizer.run(request.getParameter("dataminer"), sentence, token, link, response);
} catch (NlpHubException ex) {
writer.println(new JsonManager().getSuccessJsonResponse(Constants.UNAVAILABLE, link));
}
@@ -148,7 +148,7 @@ public class NLPUploader extends HttpServlet {
String sentence = NlpUtils.getLanguageRecognizerDigest(stringContent);
logger.info(sentence);
try {
NLpLanguageRecognizer.run(sentence, token, link, response);
NLpLanguageRecognizer.run(request.getParameter("dataminer"), sentence, token, link, response);
} catch (NlpHubException ex) {
writer.println(new JsonManager().getSuccessJsonResponse(Constants.UNAVAILABLE, link));
}

View File

@@ -8,6 +8,7 @@ public class Constants {
public static String DEFAULT_DESCRIPTION = "NlpHub upload";
public static String TOKEN_PARAMETER = "gcube-token";
public static String TEST_TOKEN = "df2cc5f5-63ee-48c1-b2a6-1210030c57b8-843339462";
public static String PUBLIC_TOKEN = "f57441ac-b361-4c2d-992a-40db034f1b8c-843339462";
public static String MIME_TEXT = "text/plain";
public static String CONTENT_TYPE = "Content-Type";
public static String UNAVAILABLE = "unavailable";

View File

@@ -37,6 +37,7 @@ public class NLpLanguageRecognizer extends DataminerClient {
private Logger logger = Logger.getLogger(NLpLanguageRecognizer.class.getSimpleName());
private String sentence, publicLink;
public final static String RECOGNIZER_ID = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LANGUAGE_RECOGNIZER";
private String dataMiner = null;
public NLpLanguageRecognizer(String service, String token, String sentence) {
super(service, "", token);
@@ -44,18 +45,22 @@
response = null;
}
public NLpLanguageRecognizer(String service, String token, String sentence, String publicLink,
public NLpLanguageRecognizer(String dataMiner, String service, String token, String sentence, String publicLink,
HttpServletResponse response) {
super(service, "", token);
this.sentence = sentence;
this.response = response;
this.publicLink = publicLink;
this.dataMiner = dataMiner;
}
public static void run(String sentence, String token, String publicLink, HttpServletResponse response) throws NlpHubException {
public static void run(String dataMiner, String sentence, String token, String publicLink, HttpServletResponse response) throws NlpHubException {
try {
//String urlService = "http://dataminer-prototypes.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0";
String urlService = Constants.DATAMINER_URL + "/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0";
if(dataMiner == null)
dataMiner = Constants.DATAMINER_URL;
else
dataMiner = "http://" + dataMiner;
String urlService = dataMiner + "/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0";
urlService += "&gcube-token=" + token;
urlService += "&lang=en-US";
urlService += "&Identifier=" + RECOGNIZER_ID;

View File

@@ -20,13 +20,13 @@ import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
public class NlpAsyncNerRunner extends AsyncHttpRequest {
public final static String WPS_EXECUTE_URL = Constants.DATAMINER_URL + "/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0";
public final static String WPS_DESCRIBE_PROCESS_URL = Constants.DATAMINER_URL + "/wps/WebProcessingService?request=DescribeProcess&service=WPS&Version=1.0.0";
private String identifier, token, httpMethod, annotations, publicLink, language;
public static String WPS_EXECUTE_URL = "/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0";
public static String WPS_DESCRIBE_PROCESS_URL = "/wps/WebProcessingService?request=DescribeProcess&service=WPS&Version=1.0.0";
private String identifier, token, httpMethod, annotations, publicLink, language, dataMiner;
private Logger logger = Logger.getLogger(NlpAsyncNerRunner.class.getSimpleName());
private RunnerCommander commander;
public NlpAsyncNerRunner(String identifier, String token, String publicLink, String annotations, String language, RunnerCommander commander) {
public NlpAsyncNerRunner(String dataMiner, String identifier, String token, String publicLink, String annotations, String language, RunnerCommander commander) {
super();
this.identifier = identifier;
this.token = token;
@@ -35,8 +35,13 @@ public class NlpAsyncNerRunner extends AsyncHttpRequest {
this.publicLink = publicLink;
this.language = language; // not used for the moment...
this.commander = commander;
if(dataMiner == null)
this.dataMiner = Constants.DATAMINER_URL;
else
this.dataMiner = "http://" + dataMiner;
ArrayList<NlpParameter> params = buildParameterString();
String serviceUrl = WPS_EXECUTE_URL + "&gcube-token=" + token + "&lang=en-US&Identifier=" + identifier;
String serviceUrl = this.dataMiner + WPS_EXECUTE_URL + "&gcube-token=" + token + "&lang=en-US&Identifier=" + identifier;
serviceUrl += "&" + setUrl(params);
super.setBaseUrl(serviceUrl);
super.setMethod(httpMethod);
@@ -75,7 +80,7 @@ public class NlpAsyncNerRunner extends AsyncHttpRequest {
HttpURLConnection connection = null;
BufferedReader r = null;
try {
String finalUrl = WPS_DESCRIBE_PROCESS_URL + "&gcube-token=" + token;
String finalUrl = dataMiner + WPS_DESCRIBE_PROCESS_URL + "&gcube-token=" + token;
finalUrl += "&lang=en-US&Identifier=" + identifier;
URL url = new URL(finalUrl);
connection = (HttpURLConnection) url.openConnection();
@@ -173,17 +178,4 @@ public class NlpAsyncNerRunner extends AsyncHttpRequest {
logger.error(x.getLocalizedMessage());
}
}
// public static void main(String[] args) {
// String id1 = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ENGLISH_NAMED_ENTITY_RECOGNIZER";
// String id2 = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ENGLISH_NER_CORENLP";
// String tokken = "df2cc5f5-63ee-48c1-b2a6-1210030c57b8-843339462";
// String ann = "Organization,Location,Person";
// String file = "http://data.d4science.org/TWhNTS9DdVdXaTZLSWsrWUNQdHk3OUdZSU93SXRFbjhHbWJQNStIS0N6Yz0";
// file = "http://data.d4science.org/L0txb3o3Tk9GaW1LSWsrWUNQdHk3MG1ZWFdtWkJENU5HbWJQNStIS0N6Yz0";
// NlpAsyncNerRunner n1 = new NlpAsyncNerRunner(id1, tokken, file, ann, null);
// NlpAsyncNerRunner n2 = new NlpAsyncNerRunner(id2, tokken, file, ann, null);
// n2.start();
// n1.start();
// }
}

View File

@@ -15,23 +15,25 @@ public class RunnerCommander extends Thread {
private String[] identifiers = null;
private String link = "";
private String token = "";
private String dataMiner;
private ArrayList<String> outResultList = null;
private long sleepTime = 500l;
private long maxWaitingTime = 5l * 60l * 1000l;
private Logger logger = Logger.getLogger(RunnerCommander.class.getSimpleName());
private boolean complete = false;
public RunnerCommander(String[] identifiers, String link, String annotationList, String token) {
public RunnerCommander(String dataMiner, String[] identifiers, String link, String annotationList, String token) {
this.identifiers = identifiers;
this.annotationList = annotationList;
this.link = link;
this.token = token;
this.response = null;
this.sleepTime = 500l;
this.dataMiner = dataMiner;
outResultList = new ArrayList<String>();
}
public RunnerCommander(String[] identifiers, String link, String annotationList, String token,
public RunnerCommander(String dataMiner, String[] identifiers, String link, String annotationList, String token,
HttpServletResponse response) {
this.identifiers = identifiers;
this.annotationList = annotationList;
@@ -39,6 +41,7 @@ public class RunnerCommander extends Thread {
this.token = token;
this.response = response;
this.sleepTime = 500l;
this.dataMiner = dataMiner;
outResultList = new ArrayList<String>();
}
@@ -81,7 +84,7 @@ public class RunnerCommander extends Thread {
private void runAlgorithms() {
for (String id : identifiers) {
NlpAsyncNerRunner n = new NlpAsyncNerRunner(id, token, link, annotationList, null, this);
NlpAsyncNerRunner n = new NlpAsyncNerRunner(dataMiner, id, token, link, annotationList, null, this);
n.start();
}
}