nlphub/src/main/java/org/gcube/nlphub/nlp/NLpLanguageRecognizer.java

package org.gcube.nlphub.nlp;

import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.Logger;
import org.gcube.data.analysis.dataminermanagercl.server.dmservice.SClient;
import org.gcube.data.analysis.dataminermanagercl.shared.data.OutputData;
import org.gcube.data.analysis.dataminermanagercl.shared.data.computations.ComputationId;
import org.gcube.data.analysis.dataminermanagercl.shared.data.output.FileResource;
import org.gcube.data.analysis.dataminermanagercl.shared.data.output.MapResource;
import org.gcube.data.analysis.dataminermanagercl.shared.data.output.Resource;
import org.gcube.data.analysis.dataminermanagercl.shared.parameters.FileParameter;
import org.gcube.data.analysis.dataminermanagercl.shared.parameters.ListParameter;
import org.gcube.data.analysis.dataminermanagercl.shared.parameters.ObjectParameter;
import org.gcube.data.analysis.dataminermanagercl.shared.parameters.Parameter;
import org.gcube.data.analysis.dataminermanagercl.shared.parameters.ParameterType;
import org.gcube.nlphub.legacy.DataminerClient;
import org.gcube.nlphub.legacy.NlpHubException;
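
/**
 * Client for the DataMiner LANGUAGE_RECOGNIZER algorithm: it submits a single
 * sentence for language identification and, when the computation ends, collects
 * the links of the output files it produced. Service configuration and execution
 * are delegated to {@link DataminerClient}.
 */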
public class NLpLanguageRecognizer extends DataminerClient {

    private Logger logger = Logger.getLogger(NLpLanguageRecognizer.class.getSimpleName());
    private String sentence;

    public final static String RECOGNIZER_ID = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LANGUAGE_RECOGNIZER";

    // private String service = "http://dataminer-prototypes.d4science.org/wps/";
    // private String token = "df2cc5f5-63ee-48c1-b2a6-1210030c57b8-843339462";

    public NLpLanguageRecognizer(String service, String token, String sentence) {
        super(service, "", token);
        this.sentence = sentence;
    }
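
    /**
     * Sets the algorithm identifier, initialises the underlying DataMiner client
     * and submits the sentence as the "sentence" input parameter of the
     * LANGUAGE_RECOGNIZER algorithm.
     *
     * @throws NlpHubException if the remote execution cannot be started
     */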
    public void run() throws NlpHubException {
        try {
            super.identifier = RECOGNIZER_ID;
            super.init();
            ObjectParameter inputParameter = new ObjectParameter();
            inputParameter.setName("sentence");
            inputParameter.setValue(sentence);
            ArrayList<Parameter> parameters = new ArrayList<>();
            parameters.add(inputParameter);
            super.execute(parameters);
        } catch (Exception e) {
            logger.error(e.getLocalizedMessage());
            throw new NlpHubException(e.getLocalizedMessage(), e);
        }
    }
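
    /**
     * Callback invoked when the computation ends: scans the output map and, for
     * every file resource with MIME type "application/d4science", extracts the
     * download link together with the short name of the operator that produced it.
     */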
    @Override
    public void retrieveOutput(ComputationId computationId, SClient sClient) {
        try {
            OutputData output = sClient.getOutputDataByComputationId(computationId);
            Resource resource = output.getResource();
            if (resource.isMap()) {
                MapResource mapResource = (MapResource) resource;
                for (String key : mapResource.getMap().keySet()) {
                    Resource r = mapResource.getMap().get(key);
                    if (r.isFile()) {
                        FileResource f = (FileResource) r;
                        String mimeType = f.getMimeType();
                        if (mimeType.equalsIgnoreCase("application/d4science")) {
                            String link = f.getUrl();
                            logger.info("url: " + link);
                            String op = computationId.getOperatorId();
                            op = op.substring(op.lastIndexOf(".") + 1);
                            // testEndOfProcess(op + ":::" + link);
                        }
                    }
                }
            }
        } catch (Exception e) {
            logger.error(e.getLocalizedMessage(), e);
            // writeResponse(e.getLocalizedMessage(), false);
        }
    }
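
    /**
     * Stand-alone entry point kept only as a local test harness; the sample
     * invocation and the whitespace-normalisation experiment below are commented out.
     */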
    public static void main(String[] args) {
        // String service = "http://dataminer-prototypes.d4science.org/wps/";
        // String token = "df2cc5f5-63ee-48c1-b2a6-1210030c57b8-843339462";
        // String sentence = "Per me si va nella città dolente";
        //
        // NLpLanguageRecognizer recognizer = new NLpLanguageRecognizer(service, token, sentence);
        // try {
        //     recognizer.run();
        // } catch (Exception x) {
        //     x.printStackTrace();
        // }

        /*
        String test = "Anch'io ho voglia di dare il mio contributo\n alla causa";
        String regularized = test.replaceAll("[\\s]+", " ");
        System.out.println("Before: " + test + "\n" + "After: " + regularized);
        regularized = test.replaceAll("[\\n]+", " ");
        System.out.println("After: " + regularized);
        */
    }
}