ref 11719: Add management of VREs to the NLPHub interface

https://support.d4science.org/issues/11719

Fixed some issues in the webapp:

Fixed errors caused by shared instance variables in the servlets (see the sketch after this list)
Added SLF4J logging across the whole application; log output goes to nlphub.log
Fixed the null checks on arrays in the JavaScript code
Added the ability to retrieve the context and token from the request URI
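The shared-variable errors came from servlet instance fields: the container reuses a single servlet instance for all concurrent requests, so fields such as token and devMode were effectively global state. The diffs below replace them with a per-request lookup that is passed down as a method parameter. A minimal sketch of the before/after pattern (the class names here are illustrative; SessionUtils.getToken is the helper introduced by this commit):

import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.gcube.data.analysis.nlphub.session.SessionUtils;

// BEFORE (racy): one servlet instance serves all requests, so a token kept in
// an instance field can be overwritten by a concurrent request before doWork()
// reads it.
class TokenServletBefore extends HttpServlet {
    private static final long serialVersionUID = 1L;
    private String token; // shared mutable state across request threads

    protected void doGet(HttpServletRequest request, HttpServletResponse response) {
        token = request.getParameter("gcube-token");
        doWork(response); // may observe another request's token
    }

    private void doWork(HttpServletResponse response) {
        // ... uses this.token ...
    }
}

// AFTER (thread-safe, the pattern applied in this commit): the token is
// resolved per request and passed as a parameter, so it only lives on the
// stack of the thread serving that request.
class TokenServletAfter extends HttpServlet {
    private static final long serialVersionUID = 1L;

    protected void doGet(HttpServletRequest request, HttpServletResponse response) {
        String token = SessionUtils.getToken(request);
        doWork(response, token);
    }

    private void doWork(HttpServletResponse response, String token) {
        // ... uses the parameter only ...
    }
}

The same refactoring is applied to NLPHub, NLPMapper and NLPUploader in the hunks below.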

git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/nlphub@167438 82a268e6-3cf1-43bd-a215-b396298e98cf
master
Giancarlo Panichi 6 years ago
parent 186e910618
commit 6c8130fb64

@ -105,20 +105,13 @@
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.6.4</version>
<scope>compile</scope>
<version>1.7.5</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.6.4</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.16</version>
<scope>compile</scope>
<scope>test</scope>
</dependency>
<!-- JUnit -->
<dependency>

@ -14,12 +14,12 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.log4j.Logger;
import org.gcube.data.analysis.nlphub.legacy.Constants;
import org.gcube.data.analysis.nlphub.legacy.JsonManager;
import org.gcube.data.analysis.nlphub.nlp.NlpAsyncNerRunner;
import org.gcube.data.analysis.nlphub.nlp.NlpParameter;
import org.gcube.data.analysis.nlphub.nlp.RunnerCommander;
import org.gcube.data.analysis.nlphub.session.SessionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
@ -30,12 +30,9 @@ import org.xml.sax.InputSource;
*/
@WebServlet(asyncSupported = true, name = "NLPServlet", urlPatterns = { "/nlphub-servlet" })
public class NLPHub extends HttpServlet {
private Logger logger = Logger.getLogger(NLPHub.class.getSimpleName());
private static final long serialVersionUID = 1L;
public static final String service = "http://dataminer-prototypes.d4science.org/wps/";
private String token; //= "df2cc5f5-63ee-48c1-b2a6-1210030c57b8-843339462";
private boolean devMode = true;
private static final Logger logger = LoggerFactory.getLogger(NLPHub.class);
/**
* @see HttpServlet#HttpServlet()
*/
@ -63,16 +60,18 @@ public class NLPHub extends HttpServlet {
}
private void doWork(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
if(request.getParameter("getInfo") != null) {
getAlgorithmInfo(request, response);
}
else {
runAlgorithms(request, response);
logger.debug("NLPHub");
String token = SessionUtils.getToken(request);
if (request.getParameter("getInfo") != null) {
getAlgorithmInfo(request, response, token);
} else {
runAlgorithms(request, response, token);
}
}
private void getAlgorithmInfo(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
token = Constants.getToken(request, devMode);
private void getAlgorithmInfo(HttpServletRequest request, HttpServletResponse response, String token)
throws ServletException, IOException {
try {
String dataMiner = request.getParameter("dataminer");
String algId = request.getParameter("algId");
@ -82,7 +81,8 @@ public class NLPHub extends HttpServlet {
PrintWriter writer = response.getWriter();
response.setContentType("application/json;charset=UTF-8");
try {
String finalUrl = "https://" + dataMiner + NlpAsyncNerRunner.WPS_DESCRIBE_PROCESS_URL + "&gcube-token=" + token;
String finalUrl = "https://" + dataMiner + NlpAsyncNerRunner.WPS_DESCRIBE_PROCESS_URL + "&gcube-token="
+ token;
finalUrl += "&lang=en-US&Identifier=" + algId;
URL url = new URL(finalUrl);
connection = (HttpURLConnection) url.openConnection();
@ -93,22 +93,21 @@ public class NLPHub extends HttpServlet {
r = new BufferedReader(new InputStreamReader(connection.getInputStream()));
Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new InputSource(r));
doc.getDocumentElement().normalize();
NodeList nListInput = doc.getElementsByTagName("ows:Abstract");
if(nListInput.getLength() <= 0) {
if (nListInput.getLength() <= 0) {
algAbsrtact = "No description.";
writer.println(new JsonManager().getErrorJsonResponse(algAbsrtact));
}
else {
} else {
Node nodeAbstract = nListInput.item(0);
algAbsrtact = nodeAbstract.getTextContent();
writer.println(new JsonManager().getSuccessJsonResponse("" + algAbsrtact));
}
writer.close();
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
writer.println(new JsonManager().getErrorJsonResponse("" + x.getLocalizedMessage()));
} catch (Exception e) {
logger.error(e.getLocalizedMessage(),e);
writer.println(new JsonManager().getErrorJsonResponse("" + e.getLocalizedMessage()));
writer.close();
} finally {
try {
@ -120,31 +119,30 @@ public class NLPHub extends HttpServlet {
logger.error(e.getLocalizedMessage());
}
}
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
}
} catch (Exception e) {
logger.error(e.getLocalizedMessage(),e);
}
}
private void runAlgorithms(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
token = Constants.getToken(request, devMode);
private void runAlgorithms(HttpServletRequest request, HttpServletResponse response, String token)
throws ServletException, IOException {
try {
String dataMiner = request.getParameter("dataminer");
String[] algs = request.getParameter("algs").split(",");
for(int i=0; i<algs.length; i++) {
for (int i = 0; i < algs.length; i++) {
algs[i] = algs[i].trim();
}
RunnerCommander commander = new RunnerCommander(dataMiner, algs, request.getParameter("plink"), request.getParameter("annotations"), token,
response);
RunnerCommander commander = new RunnerCommander(dataMiner, algs, request.getParameter("plink"),
request.getParameter("annotations"), token, response);
commander.setSleepTime(100);
commander.setMaxWaitingTime(90*1000);
commander.setMaxWaitingTime(90 * 1000);
commander.startProcess();
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
} catch (Exception e) {
logger.error(e.getLocalizedMessage(),e);
}
}
}

@ -14,13 +14,15 @@ import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.gcube.data.analysis.nlphub.legacy.Constants;
import org.gcube.data.analysis.nlphub.legacy.JsonManager;
import org.gcube.data.analysis.nlphub.legacy.NlpHubException;
import org.gcube.data.analysis.nlphub.mapper.DefaultMapper;
import org.gcube.data.analysis.nlphub.mapper.JsonMapper;
import org.gcube.data.analysis.nlphub.session.SessionUtils;
import org.gcube.data.analysis.nlphub.workspace.WorkspaceManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Servlet implementation class NlpMapper
@ -28,10 +30,8 @@ import org.gcube.data.analysis.nlphub.workspace.WorkspaceManager;
@WebServlet("/nlphub-mapper-servlet")
public class NLPMapper extends HttpServlet {
private static final long serialVersionUID = 1L;
private Logger logger = Logger.getLogger(NLPMapper.class.getSimpleName());
private String token; //= "df2cc5f5-63ee-48c1-b2a6-1210030c57b8-843339462";
private boolean devMode = true;
private static final Logger logger = LoggerFactory.getLogger(NLPUploader.class);
/**
* @see HttpServlet#HttpServlet()
*/
@ -58,31 +58,37 @@ public class NLPMapper extends HttpServlet {
}
private void doWork(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
logger.debug("NLPMapper");
String token = SessionUtils.getToken(request);
WorkspaceManager ws = new WorkspaceManager();
response.setContentType("application/json");
response.setCharacterEncoding("utf-8");
token = Constants.getToken(request, devMode);
String documentLink = request.getParameter("plink"); // link to text file (workspace)
String documentLink = request.getParameter("plink"); // link to text
// file
// (workspace)
String toBeMap = request.getParameter("tobemap");
String[] tokens;
if(toBeMap.indexOf("|") > 0)
String[] tokens;
if (toBeMap.indexOf("|") > 0)
tokens = toBeMap.split("\\|");
else {
tokens = new String[1];
tokens[0] = toBeMap;
}
String annotations = request.getParameter("annotations");
String language = request.getParameter("lang");
PrintWriter writer = response.getWriter();
try {
String text = getDocument(documentLink);
text = text.replaceAll("\n", "\\\\n");;
text = text.replaceAll("\n", "\\\\n");
text = text.replaceAll("\r", "\\\\r");
text = text.replaceAll("\t", "\\\\t");
text = text.replaceAll("\"", "\\\\\"");
String out = "{";
out += "\"text\":\"" + text + "\",";
out += "\"annotations\":\"" + annotations + "\",";
@ -90,9 +96,9 @@ public class NLPMapper extends HttpServlet {
out += "\"result\": [";
for (int i = 0; i < tokens.length; i++) {
String token = tokens[i];
String[] t = token.split(":::");
if(t[1].equals(Constants.ERROR_ID)) {
String tk = tokens[i];
String[] t = tk.split(":::");
if (t[1].equals(Constants.ERROR_ID)) {
logger.error("Algorithm " + t[0] + " in error. Bypass...");
continue;
}
@ -106,24 +112,26 @@ public class NLPMapper extends HttpServlet {
}
}
out += "]}";
String resultLink = saveResult(out);
String resultLink = saveResult(out, token, ws);
String outResult = "{";
outResult += "\"link\":\"" + resultLink + "\",";
outResult += "\"output\":" + out;
outResult += "}";
writer.append(outResult);
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
writer.println(new JsonManager().getErrorJsonResponse(x.getLocalizedMessage()));
} catch (Exception e) {
logger.error(e.getLocalizedMessage(),e);
writer.println(new JsonManager().getErrorJsonResponse(e.getLocalizedMessage()));
}
// response.getWriter().write(json);
}
private Class getMapper(String algId) throws Exception {
return Class.forName("org.gcube.nlphub.mapper.DefaultMapper");
private Class<DefaultMapper> getMapper(String algId) throws Exception {
return DefaultMapper.class;
// return
// Class.forName("org.gcube.data.analynlphub.mapper.DefaultMapper");
}
private String getDocument(String plink) throws Exception {
@ -142,20 +150,19 @@ public class NLPMapper extends HttpServlet {
}
return content;
}
private String saveResult(String jsonResult) throws NlpHubException {
WorkspaceManager ws = new WorkspaceManager();
private String saveResult(String jsonResult, String token, WorkspaceManager ws) throws NlpHubException {
long now = System.currentTimeMillis();
String fileName = "result-nlp-" + now + ".json";
ws.deleteFile(fileName, token);
byte[] byteContent = jsonResult.getBytes(StandardCharsets.UTF_8);
if (!ws.uploadFile(byteContent, fileName, Constants.DEFAULT_DESCRIPTION, token)) {
throw new NlpHubException("Error writing file: " + fileName + " on workspace (token: " + token + ")", null);
throw new NlpHubException("Error writing file: " + fileName + " on workspace (token: " + token + ")", null);
}
String link = ws.getPublicLink(fileName, token);
logger.info("Output json [" + fileName + "] created in " + (System.currentTimeMillis() - now) + " millisec.");
return link;
}

@ -14,13 +14,15 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.Part;
import org.apache.log4j.Logger;
import org.gcube.data.analysis.nlphub.legacy.Constants;
import org.gcube.data.analysis.nlphub.legacy.JsonManager;
import org.gcube.data.analysis.nlphub.legacy.NlpHubException;
import org.gcube.data.analysis.nlphub.nlp.NLpLanguageRecognizer;
import org.gcube.data.analysis.nlphub.nlp.NlpUtils;
import org.gcube.data.analysis.nlphub.session.SessionUtils;
import org.gcube.data.analysis.nlphub.workspace.WorkspaceManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Servlet implementation class NLPUploader
@ -30,10 +32,7 @@ import org.gcube.data.analysis.nlphub.workspace.WorkspaceManager;
@MultipartConfig
public class NLPUploader extends HttpServlet {
private static final long serialVersionUID = 1L;
private Logger logger = Logger.getLogger(NLPUploader.class.getSimpleName());
private boolean devMode = true;
private String token; // = "df2cc5f5-63ee-48c1-b2a6-1210030c57b8-843339462";
private WorkspaceManager ws;
private static final Logger logger = LoggerFactory.getLogger(NLPUploader.class);
/**
* @see HttpServlet#HttpServlet()
@ -58,21 +57,22 @@ public class NLPUploader extends HttpServlet {
*/
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
token = Constants.getToken(request, devMode);
doWork(request, response);
}
private void doWork(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
ws = new WorkspaceManager();
logger.debug("NLPUploader");
String token = SessionUtils.getToken(request);
WorkspaceManager ws = new WorkspaceManager();
response.setContentType("application/json;charset=UTF-8");
if (request.getParameter("freetext") == null)
handleFileUpload(request, response);
handleFileUpload(request, response, token, ws);
else
handleFreeText(request, response);
handleFreeText(request, response, token, ws);
}
private void handleFreeText(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
private void handleFreeText(HttpServletRequest request, HttpServletResponse response, String token,
WorkspaceManager ws) throws ServletException, IOException {
String freeText = request.getParameter("freetext");
freeText = NlpUtils.replaceDirtyCharacters(freeText);
@ -86,25 +86,24 @@ public class NLPUploader extends HttpServlet {
return;
}
String link = ws.getPublicLink(fileName, token);
if(request.getParameter("getlang") != null) {
if (request.getParameter("getlang") != null) {
String sentence = NlpUtils.getLanguageRecognizerDigest(new String(content));
logger.info(sentence);
try {
NLpLanguageRecognizer.run(request.getParameter("dataminer"), sentence, token, link, response);
} catch (NlpHubException ex) {
writer.println(new JsonManager().getSuccessJsonResponse(Constants.UNAVAILABLE, link));
}
}
else writer.println(new JsonManager().getSuccessJsonResponse("" + link));
} catch (Exception x) {
x.printStackTrace();
logger.error(x.getClass().getName() + ": " + x.getLocalizedMessage());
writer.println(new JsonManager().getErrorJsonResponse(x.getLocalizedMessage()));
}
} else
writer.println(new JsonManager().getSuccessJsonResponse("" + link));
} catch (Exception e) {
logger.error(e.getLocalizedMessage(),e);
writer.println(new JsonManager().getErrorJsonResponse(e.getLocalizedMessage()));
}
}
private void handleFileUpload(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
private void handleFileUpload(HttpServletRequest request, HttpServletResponse response, String token,
WorkspaceManager ws) throws ServletException, IOException {
int contentLength = request.getContentLength();
Part filePart = request.getPart("mytxtfile");
@ -154,10 +153,9 @@ public class NLPUploader extends HttpServlet {
}
// writer.println(new JsonManager().getSuccessJsonResponse("" +
// link));
} catch (Exception x) {
x.printStackTrace();
logger.error(x.getClass().getName() + ": " + x.getLocalizedMessage());
writer.println(new JsonManager().getErrorJsonResponse(x.getLocalizedMessage()));
} catch (Exception e) {
logger.error(e.getLocalizedMessage(),e);
writer.println(new JsonManager().getErrorJsonResponse(e.getLocalizedMessage()));
}
}

@ -6,13 +6,17 @@ import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import org.apache.log4j.Logger;
import org.gcube.data.analysis.nlphub.nlp.NlpParameter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AsyncHttpRequest extends Thread {
private String baseUrl, finalUrl, result, method;
private ArrayList<NlpParameter> parameters;
private Logger logger = Logger.getLogger(AsyncHttpRequest.class.getSimpleName());
private static final Logger logger = LoggerFactory.getLogger(AsyncHttpRequest.class);
protected long elapsedTime;
public AsyncHttpRequest() {
@ -20,13 +24,13 @@ public class AsyncHttpRequest extends Thread {
this.parameters = null;
this.method = "GET";
finalUrl = null;
elapsedTime = 0;
elapsedTime = 0;
}
public AsyncHttpRequest(String baseUrl, String method, ArrayList<NlpParameter> parameters) {
this.baseUrl = baseUrl;
this.parameters = parameters;
if(method == null)
if (method == null)
this.method = "GET";
else
this.method = (method.equalsIgnoreCase("GET") || method.equalsIgnoreCase("POST")) ? method : "GET";
@ -36,7 +40,7 @@ public class AsyncHttpRequest extends Thread {
public void run() {
elapsedTime = System.currentTimeMillis();
if(finalUrl == null)
if (finalUrl == null)
finalUrl = baseUrl;
HttpURLConnection connection = null;
BufferedReader r = null;
@ -56,11 +60,11 @@ public class AsyncHttpRequest extends Thread {
if (line != null)
result += line.trim();
}
asyncHttpRequestCallback();
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
} catch (Exception e) {
logger.error(e.getLocalizedMessage(),e);
asyncHttpRequestCallback();
} finally {
try {
@ -69,19 +73,19 @@ public class AsyncHttpRequest extends Thread {
if (connection != null)
connection.disconnect();
} catch (Exception e) {
logger.error(e.getLocalizedMessage());
logger.error(e.getLocalizedMessage(),e);
}
}
}
public String getResult() {
return result;
}
public void asyncHttpRequestCallback() {
elapsedTime = System.currentTimeMillis() - elapsedTime;
}
public void setBaseUrl(String baseUrl) {
this.baseUrl = baseUrl;
}
@ -103,8 +107,8 @@ public class AsyncHttpRequest extends Thread {
try {
finalUrl += p.getName() + "=" + URLEncoder.encode((String) p.getValue(), "UTF-8");
finalUrl += "&";
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
} catch (Exception e) {
logger.error(e.getLocalizedMessage(),e);
}
}
finalUrl = finalUrl.substring(0, finalUrl.length() - 1);

@ -1,29 +1,29 @@
package org.gcube.data.analysis.nlphub.legacy;
import javax.servlet.http.HttpServletRequest;
public class Constants {
//public static String DATAMINER_URL = "http://dataminer0-proto.d4science.org";
//public static String DATAMINER_URL = "http://dataminer2-proto.d4science.org";
public static String DATAMINER_URL = "http://dataminer-prototypes.d4science.org";
public static String DEFAULT_DESCRIPTION = "NlpHub upload";
public static String TOKEN_PARAMETER = "gcube-token";
public static String TEST_TOKEN = "df2cc5f5-63ee-48c1-b2a6-1210030c57b8-843339462";
public static String PUBLIC_TOKEN = "f57441ac-b361-4c2d-992a-40db034f1b8c-843339462";
public static String MIME_TEXT = "text/plain";
public static String CONTENT_TYPE = "Content-Type";
public static String UNAVAILABLE = "unavailable";
public static String ERROR_ID = "ERROR";
public static String INPUT_FILE_PARAMETER = "input";
public static String getToken(HttpServletRequest request, boolean devMode) {
String token = request.getParameter(TOKEN_PARAMETER);
if(devMode) {
if(token == null) token = TEST_TOKEN;
}
return token;
}
public static String hexDump(byte[] bytes) {
public static final boolean DEBUG = false;
public static final boolean TEST_ENABLE = false;
public static final String DEFAULT_USER = "giancarlo.panichi";
public static final String DEFAULT_SCOPE = "/gcube/devNext/NextNext";
public static final String DEFAULT_TOKEN = "df2cc5f5-63ee-48c1-b2a6-1210030c57b8-843339462";
public static final String DEFAULT_DATAMINER_URL = "http://dataminer-prototypes.d4science.org";
public static final String TOKEN_PARAMETER = "gcube-token";
public static final String DEFAULT_DESCRIPTION = "NlpHub upload";
//public static String PUBLIC_TOKEN = "f57441ac-b361-4c2d-992a-40db034f1b8c-843339462";
public static final String MIME_TEXT = "text/plain";
public static final String CONTENT_TYPE = "Content-Type";
public static final String UNAVAILABLE = "unavailable";
public static final String ERROR_ID = "ERROR";
public static final String INPUT_FILE_PARAMETER = "input";
/*public static String hexDump(byte[] bytes) {
char[] hexArray = "0123456789ABCDEF".toCharArray();
char[] hexChars = new char[bytes.length * 3];
for (int j = 0; j < bytes.length; j++) {
@ -32,5 +32,5 @@ public class Constants {
hexChars[3*j+2] = ' ';
}
return new String(hexChars);
}
}*/
}

@ -1,6 +1,10 @@
package org.gcube.data.analysis.nlphub.legacy;
class DataminerClientException extends Exception {
private static final long serialVersionUID = 1L;
public DataminerClientException(String message, Throwable throwable) {
super(message, throwable);
}

@ -2,14 +2,18 @@ package org.gcube.data.analysis.nlphub.legacy;
import java.io.Reader;
import java.util.ArrayList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
public class JsonManager {
private static final Logger logger = LoggerFactory.getLogger(JsonManager.class);
public static String TEXT = "text";
public static String ANNOTATIONS = "annotations";
public static String LANGUAGE = "language";
@ -23,9 +27,10 @@ public class JsonManager {
public static String MESSAGE = "message";
private JsonObject jsonObjectRoot = null;
private JsonParser jsonParser = null;
//private JsonParser jsonParser = null;
public JsonManager() {
logger.debug("JsonManager");
jsonObjectRoot = new JsonObject();
}

@ -1,14 +1,22 @@
package org.gcube.data.analysis.nlphub.legacy;
import java.util.ArrayList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
public class NerAlgorithm {
private static final Logger logger = LoggerFactory.getLogger(NerAlgorithm.class);
private String name;
private ArrayList<NerAnnotationData> annotationsData;
public NerAlgorithm(String name) {
logger.debug("NerAlgorithm: "+name);
this.name = name;
annotationsData = new ArrayList<>();
}

@ -1,10 +1,16 @@
package org.gcube.data.analysis.nlphub.legacy;
import java.util.ArrayList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
public class NerAnnotationData {
private static final Logger logger = LoggerFactory.getLogger(NerAnnotationData.class);
private String name;
private ArrayList<NerEntity> nerEntities;
@ -13,6 +19,7 @@ public class NerAnnotationData {
* @param name
*/
public NerAnnotationData(String name) {
logger.debug("NerAnnotationData: "+name);
this.name = name;
nerEntities = new ArrayList<>();
}

@ -2,11 +2,17 @@ package org.gcube.data.analysis.nlphub.legacy;
import java.util.HashMap;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
public class NerEntity {
private static final Logger logger = LoggerFactory.getLogger(NerEntity.class);
private int startIndex, endIndex;
private HashMap<String, String> properties = null;
@ -16,6 +22,7 @@ public class NerEntity {
* @param endIndex the end index of the matching annotation
*/
public NerEntity(int startIndex, int endIndex) {
logger.debug("NerEntity: [startIndex="+startIndex+", endIndex="+endIndex+"]");
this.startIndex = startIndex;
this.endIndex = endIndex;
properties = new HashMap<>();

@ -1,14 +1,21 @@
package org.gcube.data.analysis.nlphub.legacy;
import java.util.ArrayList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
public class NerOutput {
private static final Logger logger = LoggerFactory.getLogger(NerOutput.class);
private String text, annotations, language;
private ArrayList<NerAlgorithm> result;
public NerOutput(String text, String annotations, String language) {
logger.debug("NerOutput: [text="+text+", annotations="+annotations+", language="+language+"]");
this.text = text;
this.annotations = annotations;
this.language = language;

@ -1,6 +1,9 @@
package org.gcube.data.analysis.nlphub.legacy;
public class NlpHubException extends Exception {
private static final long serialVersionUID = 1L;
public NlpHubException(String message, Throwable throwable) {
super(message, throwable);
}

@ -10,13 +10,15 @@ import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import org.apache.log4j.Logger;
import org.gcube.data.analysis.nlphub.legacy.NerAlgorithm;
import org.gcube.data.analysis.nlphub.legacy.NerAnnotationData;
import org.gcube.data.analysis.nlphub.legacy.NerEntity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DefaultMapper implements JsonMapper {
private Logger logger = Logger.getLogger(DefaultMapper.class.getSimpleName());
private static final Logger logger = LoggerFactory.getLogger(DefaultMapper.class);
public String getJson(String alg, String link) {
NerAlgorithm algInfo = new NerAlgorithm(alg);
@ -59,33 +61,10 @@ public class DefaultMapper implements JsonMapper {
}
}
return algInfo.toJson().toString();
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
} catch (Exception e) {
logger.error(e.getLocalizedMessage(),e);
return null;
}
}
/*
public static void main(String[] args) {
String remoteJson = "http://data.d4science.org/a2JtV0lOUUxsenZCM2RmcFRqVGtWWm42NU9LQnFGTk9HbWJQNStIS0N6Yz0-VLT";
String alg = "ENGLISH_NAMED_ENTITY_RECOGNIZER";
//remoteJson = "http://data.d4science.org/T21IcHlMSzFJRCttZ1lHVzMxd2dyWjVOUEpyY2dwUVNHbWJQNStIS0N6Yz0-VLT";
//remoteJson = "http://data.d4science.org/bDZLVkdlaVBjZCtLSWsrWUNQdHk3MUs4cEtDUGF5NktHbWJQNStIS0N6Yz0";
DefaultMapper dm = new DefaultMapper();
try {
String s = dm.getJson(alg, remoteJson);
System.out.println("" + s);
String file = "/home/erico/debug.json";
java.io.File f = new java.io.File(file);
f.createNewFile();
java.io.FileWriter w = new java.io.FileWriter(f);
w.write(s);
w.flush();
w.close();
} catch (Exception x) {
System.out.println(x.getLocalizedMessage());
x.printStackTrace();
}
}*/
}

@ -5,45 +5,50 @@ import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletResponse;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.log4j.Logger;
import org.gcube.data.analysis.nlphub.legacy.Constants;
import org.gcube.data.analysis.nlphub.legacy.JsonManager;
import org.gcube.data.analysis.nlphub.legacy.NlpHubException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
public class NLpLanguageRecognizer {
private HttpServletResponse response;
private Logger logger = Logger.getLogger(NLpLanguageRecognizer.class.getSimpleName());
private String sentence, publicLink;
private static final Logger logger = LoggerFactory.getLogger(NLpLanguageRecognizer.class);
// private HttpServletResponse response;
// private String sentence, publicLink;
public final static String RECOGNIZER_ID = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LANGUAGE_RECOGNIZER";
private String dataMiner = null;
// private String dataMiner = null;
public NLpLanguageRecognizer(String service, String token, String sentence) {
this.sentence = sentence;
response = null;
logger.debug(
"NLpLanguageRecognizer: [service=" + service + ", token=" + token + ", sentence=" + sentence + "]");
// this.sentence = sentence;
// response = null;
}
public NLpLanguageRecognizer(String dataMiner, String service, String token, String sentence, String publicLink,
HttpServletResponse response) {
this.sentence = sentence;
this.response = response;
this.publicLink = publicLink;
this.dataMiner = dataMiner;
logger.debug("NLpLanguageRecognizer: [dataMiner=" + dataMiner + ",service=" + service + ", token=" + token
+ ", sentence=" + sentence + ",publicLink" + publicLink + "]");
// this.sentence = sentence;
// this.response = response;
// this.publicLink = publicLink;
// this.dataMiner = dataMiner;
}
public static void run(String dataMiner, String sentence, String token, String publicLink, HttpServletResponse response) throws NlpHubException {
public static void run(String dataMiner, String sentence, String token, String publicLink,
HttpServletResponse response) throws NlpHubException {
try {
if(dataMiner == null)
dataMiner = Constants.DATAMINER_URL;
if (dataMiner == null)
dataMiner = Constants.DEFAULT_DATAMINER_URL;
else
dataMiner = "http://" + dataMiner;
String urlService = dataMiner + "/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0";
@ -53,7 +58,8 @@ public class NLpLanguageRecognizer {
urlService += "&DataInputs=sentence=" + URLEncoder.encode(sentence, "UTF-8");
URL url = new URL(urlService);
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
//connection.setRequestProperty(Constants.TOKEN_PARAMETER, super.getToken());
// connection.setRequestProperty(Constants.TOKEN_PARAMETER,
// super.getToken());
connection.setDoInput(true);
connection.setDoOutput(true);
connection.setUseCaches(false);
@ -64,33 +70,31 @@ public class NLpLanguageRecognizer {
doc.getDocumentElement().normalize();
NodeList nListData = doc.getElementsByTagName("d4science:Data");
NodeList nListDesc = doc.getElementsByTagName("d4science:Description");
int len = nListData.getLength();
for(int i=0; i<len; i++) {
for (int i = 0; i < len; i++) {
Node data = nListData.item(i);
Node description = nListDesc.item(i);
String link = data.getTextContent();
String type = description.getTextContent();
if(type.equals("outfile")) {
//System.out.println(link);
if (type.equals("outfile")) {
// System.out.println(link);
String content = readFileContent(link, token);
if (response != null) {
response.getWriter()
.println(new JsonManager().getSuccessJsonResponse(content, publicLink));
}
else {
Logger.getLogger(NLpLanguageRecognizer.class.getSimpleName()).debug(new JsonManager().getSuccessJsonResponse(content, publicLink));
response.getWriter().println(new JsonManager().getSuccessJsonResponse(content, publicLink));
} else {
logger.debug(new JsonManager().getSuccessJsonResponse(content, publicLink));
}
}
}
} catch (Exception e) {
Logger.getLogger(NLpLanguageRecognizer.class.getSimpleName()).error(e.getLocalizedMessage());
logger.error(e.getLocalizedMessage(), e);
throw new NlpHubException(e.getLocalizedMessage(), e);
}
}
private static String readFileContent(String link, String token) throws Exception {
URL url = new URL(link);
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
@ -110,25 +114,20 @@ public class NLpLanguageRecognizer {
String out = response.toString();
return out;
}
/*
private String readFileContent(String link) throws Exception {
URL url = new URL(link);
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestProperty(Constants.TOKEN_PARAMETER, super.getToken());
connection.setDoInput(true);
connection.setDoOutput(true);
connection.setUseCaches(false);
connection.setRequestMethod("GET");
/*
* private String readFileContent(String link) throws Exception { URL url =
* new URL(link); HttpURLConnection connection = (HttpURLConnection)
* url.openConnection();
* connection.setRequestProperty(Constants.TOKEN_PARAMETER,
* super.getToken()); connection.setDoInput(true);
* connection.setDoOutput(true); connection.setUseCaches(false);
* connection.setRequestMethod("GET");
*
* BufferedReader r = new BufferedReader(new
* InputStreamReader(connection.getInputStream())); StringBuffer response =
* new StringBuffer(); String inputLine; while ((inputLine = r.readLine())
* != null) { response.append(inputLine); } connection.disconnect(); String
* out = response.toString(); return out; }
*/
BufferedReader r = new BufferedReader(new InputStreamReader(connection.getInputStream()));
StringBuffer response = new StringBuffer();
String inputLine;
while ((inputLine = r.readLine()) != null) {
response.append(inputLine);
}
connection.disconnect();
String out = response.toString();
return out;
}*/
}

@ -11,37 +11,41 @@ import java.util.ArrayList;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.log4j.Logger;
import org.gcube.data.analysis.nlphub.legacy.AsyncHttpRequest;
import org.gcube.data.analysis.nlphub.legacy.Constants;
import org.gcube.data.analysis.nlphub.legacy.NerOutput;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
public class NlpAsyncNerRunner extends AsyncHttpRequest {
public static String WPS_EXECUTE_URL = "/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0";
public static String WPS_DESCRIBE_PROCESS_URL = "/wps/WebProcessingService?request=DescribeProcess&service=WPS&Version=1.0.0";
private String identifier, token, httpMethod, annotations, publicLink, language, dataMiner;
private Logger logger = Logger.getLogger(NlpAsyncNerRunner.class.getSimpleName());
private static final Logger logger = LoggerFactory.getLogger(NerOutput.class);
public static final String WPS_EXECUTE_URL = "/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0";
public static final String WPS_DESCRIBE_PROCESS_URL = "/wps/WebProcessingService?request=DescribeProcess&service=WPS&Version=1.0.0";
private String identifier, token, httpMethod, annotations, publicLink, dataMiner;
private RunnerCommander commander;
public NlpAsyncNerRunner(String dataMiner, String identifier, String token, String publicLink, String annotations, String language, RunnerCommander commander) {
public NlpAsyncNerRunner(String dataMiner, String identifier, String token, String publicLink, String annotations,
String language, RunnerCommander commander) {
super();
this.identifier = identifier;
this.token = token;
this.httpMethod = "GET";
this.annotations = annotations;
this.publicLink = publicLink;
this.language = language; // not used for the moment...
this.commander = commander;
if(dataMiner == null)
this.dataMiner = Constants.DATAMINER_URL;
else
if (dataMiner == null)
this.dataMiner = Constants.DEFAULT_DATAMINER_URL;
else
this.dataMiner = "http://" + dataMiner;
ArrayList<NlpParameter> params = buildParameterString();
String serviceUrl = this.dataMiner + WPS_EXECUTE_URL + "&gcube-token=" + token + "&lang=en-US&Identifier=" + identifier;
String serviceUrl = this.dataMiner + WPS_EXECUTE_URL + "&gcube-token=" + token + "&lang=en-US&Identifier="
+ identifier;
serviceUrl += "&" + setUrl(params);
super.setBaseUrl(serviceUrl);
super.setMethod(httpMethod);
@ -120,8 +124,8 @@ public class NlpAsyncNerRunner extends AsyncHttpRequest {
parameters.add(nlpParam);
}
} catch (Exception x) {
logger.error(x.getLocalizedMessage());
} catch (Exception e) {
logger.error(e.getLocalizedMessage(), e);
} finally {
try {
if (r != null)
@ -129,7 +133,7 @@ public class NlpAsyncNerRunner extends AsyncHttpRequest {
if (connection != null)
connection.disconnect();
} catch (Exception e) {
logger.error(e.getLocalizedMessage());
logger.error(e.getLocalizedMessage(), e);
}
}
return parameters;
@ -138,7 +142,7 @@ public class NlpAsyncNerRunner extends AsyncHttpRequest {
public long getElapsedTime() {
return elapsedTime;
}
@Override
public void asyncHttpRequestCallback() {
elapsedTime = System.currentTimeMillis() - elapsedTime;
@ -147,35 +151,35 @@ public class NlpAsyncNerRunner extends AsyncHttpRequest {
String theLink = "";
try {
BufferedReader r = new BufferedReader(
new InputStreamReader(new ByteArrayInputStream(result.getBytes(StandardCharsets.UTF_8))));
new InputStreamReader(new ByteArrayInputStream(result.getBytes(StandardCharsets.UTF_8))));
Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new InputSource(r));
doc.getDocumentElement().normalize();
NodeList nListResult = doc.getElementsByTagName("ogr:Result");
for (int i=0, found=0; (i<nListResult.getLength()) && (found==0); i++) {
for (int i = 0, found = 0; (i < nListResult.getLength()) && (found == 0); i++) {
Node nodeResult = nListResult.item(i);
NodeList list = nodeResult.getChildNodes();
String res = "";
for(int j=0; j<list.getLength(); j++) {
for (int j = 0; j < list.getLength(); j++) {
Node node = list.item(j);
if(node.getNodeName().equals("d4science:Data")) {
if (node.getNodeName().equals("d4science:Data")) {
res = node.getTextContent();
}
else if(node.getNodeName().equals("d4science:MimeType")) {
if(node.getTextContent().equals("application/d4science")) {
} else if (node.getNodeName().equals("d4science:MimeType")) {
if (node.getTextContent().equals("application/d4science")) {
found = 1;
}
}
}
if(found > 0) {
if (found > 0) {
theLink = res;
}
}
commander.updateResultList(identifier.substring(identifier.lastIndexOf(".") + 1) + ":::" + theLink);
} catch (Exception x) {
commander.updateResultList(identifier.substring(identifier.lastIndexOf(".") + 1) + ":::" + Constants.ERROR_ID);
logger.error(x.getLocalizedMessage());
} catch (Exception e) {
commander.updateResultList(
identifier.substring(identifier.lastIndexOf(".") + 1) + ":::" + Constants.ERROR_ID);
logger.error(e.getLocalizedMessage(), e);
}
}
}

@ -1,7 +1,5 @@
package org.gcube.data.analysis.nlphub.nlp;
import java.util.ArrayList;
public class NlpUtils {
public static String getLanguageRecognizerDigest(String content) {

@ -5,11 +5,14 @@ import java.util.ArrayList;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.gcube.data.analysis.nlphub.legacy.Constants;
import org.gcube.data.analysis.nlphub.legacy.JsonManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class RunnerCommander extends Thread {
private static final Logger logger = LoggerFactory.getLogger(RunnerCommander.class);
private HttpServletResponse response = null;
private String annotationList = "";
private String[] identifiers = null;
@ -19,7 +22,6 @@ public class RunnerCommander extends Thread {
private ArrayList<String> outResultList = null;
private long sleepTime = 500l;
private long maxWaitingTime = 5l * 60l * 1000l;
private Logger logger = Logger.getLogger(RunnerCommander.class.getSimpleName());
private boolean complete = false;
public RunnerCommander(String dataMiner, String[] identifiers, String link, String annotationList, String token) {
@ -76,8 +78,8 @@ public class RunnerCommander extends Thread {
counter += sleepTime;
if(counter > 2*maxWaitingTime)
complete = true;
} catch (InterruptedException x) {
logger.info("Interrupted.");
} catch (InterruptedException e) {
logger.info("Interrupted. "+identifiers);
}
}
}
@ -107,7 +109,7 @@ public class RunnerCommander extends Thread {
}
logger.error("Timeout error.");
timeoutHandler();
} catch (InterruptedException x) {
} catch (InterruptedException e) {
logger.info("Elapsed time: " + counter + " msec.");
logger.info("Thread interrupted.");
timeoutHandler();
@ -148,8 +150,7 @@ public class RunnerCommander extends Thread {
}
} catch (Exception ex) {
logger.error(ex.getLocalizedMessage());
ex.printStackTrace();
logger.error(ex.getLocalizedMessage(),ex);
} finally {
complete = true;
if (writer != null)

@ -0,0 +1,22 @@
package org.gcube.data.analysis.nlphub.session;
import javax.servlet.http.HttpServletRequest;
import org.gcube.data.analysis.nlphub.legacy.Constants;
public class SessionUtils {
public static String getToken(HttpServletRequest request) {
String token = request.getParameter(Constants.TOKEN_PARAMETER);
if(token == null || token.isEmpty()){
if(Constants.DEBUG){
token = Constants.DEFAULT_TOKEN;
} else {
}
}
return token;
}
}

@ -10,15 +10,13 @@ import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import javax.servlet.http.Part;
import org.apache.log4j.Logger;
import org.gcube.data.analysis.nlphub.NLPUploader;
import org.gcube.data.analysis.nlphub.legacy.Constants;
import org.gcube.data.analysis.nlphub.legacy.NlpHubException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class WorkspaceManager {
private Logger logger = Logger.getLogger(WorkspaceManager.class.getSimpleName());
private static final Logger logger = LoggerFactory.getLogger(WorkspaceManager.class);
public String getPublicLink(String fileName, String token) throws NlpHubException {
try {
@ -62,7 +60,7 @@ public class WorkspaceManager {
return link;
} catch (Exception e) {
logger.error(e.getLocalizedMessage());
logger.error(e.getLocalizedMessage(), e);
throw new NlpHubException(e.getLocalizedMessage(), e);
}
}
@ -93,8 +91,9 @@ public class WorkspaceManager {
}
String xmlOut = response.toString();
logger.debug("deleteFile: " + xmlOut);
} catch (Exception e) {
logger.error(e.getLocalizedMessage());
logger.error(e.getLocalizedMessage(), e);
throw new NlpHubException(e.getLocalizedMessage(), e);
}
}
@ -133,7 +132,7 @@ public class WorkspaceManager {
return true;
} catch (Exception e) {
logger.error(e.getLocalizedMessage());
logger.error(e.getLocalizedMessage(), e);
throw new NlpHubException(e.getLocalizedMessage(), e);
} finally {
// output stream must be closed anyway...
@ -141,7 +140,7 @@ public class WorkspaceManager {
try {
output.close();
} catch (IOException e) {
logger.error(e.getLocalizedMessage());
logger.error(e.getLocalizedMessage(), e);
}
}
}

@ -1,12 +0,0 @@
log4j.rootLogger=DEBUG, A1
log4j.appender.A1=org.apache.log4j.ConsoleAppender
log4j.appender.A1.layout=org.apache.log4j.PatternLayout
# Print the date in ISO 8601 format
log4j.appender.A1.layout.ConversionPattern=%d [%t] %-5p %c - %m%n
# Print only messages of level TRACE or above in the package org.gcube
log4j.logger.org.gcube=TRACE
log4j.logger.org.gcube.application.framework.core.session=INFO
log4j.logger.org.gcube.common.scope.impl.DefaultScopeProvider=ERROR
log4j.logger.com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor=ERROR

@ -1,8 +0,0 @@
log4j.rootLogger=INFO,stdout
log4j.logger.com.endeca=INFO
# Logger for crawl metrics
log4j.logger.com.endeca.itl.web.metrics=INFO
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%p\t%d{ISO8601}\t%r\t%c\t[%t]\t%m%n

@ -1,20 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE xml>
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{0}: %msg%n</pattern>
</encoder>
</appender>
<logger name="org.gcube" level="INFO" />
<logger name="org.gcube.vremanagement.smartexecutor" level="TRACE" />
<logger name="org.gcube.dataharvest" level="TRACE" />
<root level="WARN">
<appender-ref ref="STDOUT" />
</root>
</configuration>

@ -0,0 +1,83 @@
<configuration scan="true" debug="false">
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>/home/gcube/tomcat/logs/ghn.log</file>
<append>true</append>
<encoder>
<pattern>%date [%thread] %-5level %logger{0}: %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>/home/gcube/tomcat/logs/ghn.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<maxHistory>30</maxHistory>
<maxFileSize>10MB</maxFileSize>
<totalSizeCap>2GB</totalSizeCap>
</rollingPolicy>
</appender>
<appender name="ACCOUNT_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>/home/gcube/tomcat/logs/accounting.log</file>
<append>true</append>
<encoder>
<pattern>%date [%thread] %-5level %logger{0}: %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>/home/gcube/tomcat/logs/accounting.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<maxHistory>30</maxHistory>
<maxFileSize>10MB</maxFileSize>
<totalSizeCap>2GB</totalSizeCap>
</rollingPolicy>
</appender>
<appender name="ACCESS_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>/home/gcube/tomcat/logs/access.log</file>
<append>true</append>
<encoder>
<pattern>%date [%thread] %-5level %logger{0}: %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>/home/gcube/tomcat/logs/access.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<maxHistory>30</maxHistory>
<maxFileSize>10MB</maxFileSize>
<totalSizeCap>2GB</totalSizeCap>
</rollingPolicy>
</appender>
<appender name="NLP_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>/home/gcube/tomcat/logs/nlphub.log</file>
<append>true</append>
<encoder>
<pattern>%date [%thread] %-5level %logger{0}: %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>/home/gcube/tomcat/logs/nlphub.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<maxHistory>30</maxHistory>
<maxFileSize>10MB</maxFileSize>
<totalSizeCap>2GB</totalSizeCap>
</rollingPolicy>
</appender>
<logger name="org.gcube.data.publishing" level="ERROR">
<appender-ref ref="ACCOUNT_FILE" />
</logger>
<logger name="org.gcube.documentstore" level="ERROR">
<appender-ref ref="ACCOUNT_FILE" />
</logger>
<logger name="org.gcube.smartgears.handlers.application.request" level="INFO">
<appender-ref ref="ACCESS_FILE" />
</logger>
<logger name="org.gcube" level="WARN" />
<logger name="org.gcube.smartgears" level="WARN" />
<logger name="org.gcube.common.events" level="WARN" />
<logger name="org.gcube.data.analysis.nlphub" level="DEBUG">
<appender-ref ref="NLP_FILE" />
</logger>
<root level="WARN">
<appender-ref ref="FILE" />
</root>
</configuration>

@ -11,7 +11,7 @@
</welcome-file-list>
<servlet>
<servlet-name>NLPServlet</servlet-name>
<servlet-class>org.gcube.nlphub.NLPHub</servlet-class>
<servlet-class>org.gcube.data.analysis.nlphub.NLPHub</servlet-class>
</servlet>
<servlet-mapping>
<servlet-name>NLPServlet</servlet-name>
@ -19,7 +19,7 @@
</servlet-mapping>
<servlet>
<servlet-name>NLPUploader</servlet-name>
<servlet-class>org.gcube.nlphub.NLPUploader</servlet-class>
<servlet-class>org.gcube.data.analysis.nlphub.NLPUploader</servlet-class>
</servlet>
<servlet-mapping>
<servlet-name>NLPUploader</servlet-name>
@ -27,7 +27,7 @@
</servlet-mapping>
<servlet>
<servlet-name>NLPMapper</servlet-name>
<servlet-class>org.gcube.nlphub.NLPMapper</servlet-class>
<servlet-class>org.gcube.data.analysis.nlphub.NLPMapper</servlet-class>
</servlet>
<servlet-mapping>
<servlet-name>NLPMapper</servlet-name>

@ -23,7 +23,15 @@
<script type="text/javascript">
var activePage = "Named Entity Recognition";
var inputFile = '<%= ((request.getParameter(Constants.INPUT_FILE_PARAMETER) == null) ? "" : "" + request.getParameter(Constants.INPUT_FILE_PARAMETER)) %>';
var gCubeToken = '<%= ((request.getParameter(Constants.TOKEN_PARAMETER) == null) ? "" : "" + request.getParameter(Constants.TOKEN_PARAMETER)) %>';
<%
String requestToken=request.getParameter(Constants.TOKEN_PARAMETER);
if( requestToken == null||requestToken.isEmpty()) {
%>
var gCubeToken = "fea75a5a-d84c-495f-b0ca-09cdd95bacce-843339462";
<% } else { %>
var gCubeToken = <%=requestToken%>
<% } %>
var scheme = '<%= request.getScheme() %>';
var contextPath = '<%= request.getContextPath() %>';
</script>
