git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/DataMiner@149386 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent
46be6523da
commit
757f15e36b
|
@ -27,6 +27,10 @@
|
|||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="output" path="target/classes"/>
|
||||
</classpath>
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
eclipse.preferences.version=1
|
||||
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
|
||||
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
|
||||
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
|
||||
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
|
||||
org.eclipse.jdt.core.compiler.compliance=1.7
|
||||
org.eclipse.jdt.core.compiler.compliance=1.8
|
||||
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
|
||||
org.eclipse.jdt.core.compiler.debug.localVariable=generate
|
||||
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
|
||||
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
|
||||
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
|
||||
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
|
||||
org.eclipse.jdt.core.compiler.source=1.7
|
||||
org.eclipse.jdt.core.compiler.source=1.8
|
||||
|
|
|
@ -1,24 +0,0 @@
|
|||
#### Use two appenders, one to log to console, another to log to a file
|
||||
log4j.rootCategory=ERROR,AR
|
||||
|
||||
#### Second appender writes to a file
|
||||
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
|
||||
log4j.appender.stdout.Threshold=OFF
|
||||
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.stdout.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
|
||||
|
||||
log4j.logger.AnalysisLogger=AR
|
||||
log4j.appender.AR=org.apache.log4j.RollingFileAppender
|
||||
log4j.appender.AR.Threshold=TRACE
|
||||
log4j.appender.AR.File=./Analysis.log
|
||||
log4j.appender.AR.MaxFileSize=50000KB
|
||||
log4j.appender.AR.MaxBackupIndex=2
|
||||
log4j.appender.AR.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.AR.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
|
||||
|
||||
#### Third appender writes to a file
|
||||
log4j.logger.org.hibernate=H
|
||||
log4j.appender.H=org.apache.log4j.AsyncAppender
|
||||
log4j.appender.H.Threshold=OFF
|
||||
log4j.appender.H.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.H.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
|
18
pom.xml
18
pom.xml
|
@ -101,6 +101,12 @@
|
|||
<groupId>org.gcube.dataanalysis</groupId>
|
||||
<artifactId>ecological-engine-geospatial-extensions</artifactId>
|
||||
<version>[1.3.2-SNAPSHOT,2.0.0-SNAPSHOT)</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>log4j</artifactId>
|
||||
<groupId>log4j</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.dataanalysis</groupId>
|
||||
|
@ -154,6 +160,14 @@
|
|||
<version>[2.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/xml-apis/xml-apis -->
|
||||
<dependency>
|
||||
<groupId>xml-apis</groupId>
|
||||
<artifactId>xml-apis</artifactId>
|
||||
<version>2.0.2</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
</dependencies>
|
||||
<build>
|
||||
<plugins>
|
||||
|
@ -163,8 +177,8 @@
|
|||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<version>3.1</version>
|
||||
<configuration>
|
||||
<source>1.7</source>
|
||||
<target>1.7</target>
|
||||
<source>1.8</source>
|
||||
<target>1.8</target>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
|
|
|
@ -12,7 +12,7 @@ import org.gcube.dataanalysis.ecoengine.processing.factories.ProcessorsFactory;
|
|||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.InfrastructureDialoguer;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.ConfigurationManager;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.TokenManager;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.AlgorithmManager;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.GcubeAlgorithmRepository;
|
||||
import org.n52.wps.commons.WPSConfig;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -93,7 +93,7 @@ public class GetCapabilitiesBuilder {
|
|||
LOGGER.info("using classloader class {} ",Thread.currentThread().getContextClassLoader().getClass().getSimpleName());
|
||||
|
||||
|
||||
Set<Class<?>> algorithmsClass = AlgorithmManager.getInstance().getAllAlgorithms();
|
||||
Set<Class<?>> algorithmsClass = GcubeAlgorithmRepository.getAllAlgorithms();
|
||||
|
||||
LOGGER.info("class found with annotation Algorithm are {}",algorithmsClass.size());
|
||||
|
||||
|
|
|
@ -1,90 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import net.opengis.wps.x100.ProcessDescriptionType;
|
||||
|
||||
import org.n52.wps.algorithm.annotation.Algorithm;
|
||||
import org.n52.wps.server.IAlgorithm;
|
||||
import org.reflections.Reflections;
|
||||
import org.reflections.util.ClasspathHelper;
|
||||
import org.reflections.util.ConfigurationBuilder;
|
||||
import org.reflections.util.FilterBuilder;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class AlgorithmManager {
|
||||
|
||||
private static long UPDATE_TIME_IN_MILLIS = 60000;
|
||||
|
||||
private static Logger log = LoggerFactory.getLogger(AlgorithmManager.class);
|
||||
|
||||
private static AlgorithmManager instance= new AlgorithmManager();
|
||||
|
||||
private Long lastUpdate = 0l;
|
||||
|
||||
private Reflections reflection;
|
||||
|
||||
public static synchronized AlgorithmManager getInstance(){
|
||||
instance.updateRepository();
|
||||
return instance;
|
||||
}
|
||||
|
||||
public AlgorithmManager(){
|
||||
updateRepository();
|
||||
}
|
||||
|
||||
public ProcessDescriptionType getProcessDescription(String identifier) throws Exception{
|
||||
log.info("getProcessDescription with identifier {} ",identifier);
|
||||
Set<Class<?>> classes = reflection.getTypesAnnotatedWith(Algorithm.class);
|
||||
for (Class<?> _class: classes){
|
||||
if (_class.getAnnotation(Algorithm.class).identifier().equals(identifier)){
|
||||
return ((IAlgorithm)_class.newInstance()).getDescription();
|
||||
}
|
||||
}
|
||||
throw new Exception(String.format("Algorithm with process id %s not found", identifier));
|
||||
}
|
||||
|
||||
public boolean containsAlgorithm(String identifier) {
|
||||
log.info("containsAlgorithm with identifier {} ",identifier);
|
||||
Set<Class<?>> classes = reflection.getTypesAnnotatedWith(Algorithm.class);
|
||||
for (Class<?> _class: classes){
|
||||
if (_class.getAnnotation(Algorithm.class).identifier().equals(identifier)){
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public IAlgorithm getAlgorithm(String identifier) throws Exception{
|
||||
log.info("getAlgorithm with identifier {} ",identifier);
|
||||
Set<Class<?>> classes = reflection.getTypesAnnotatedWith(Algorithm.class);
|
||||
for (Class<?> _class: classes){
|
||||
if (_class.getAnnotation(Algorithm.class).identifier().equals(identifier)){
|
||||
if (IAlgorithm.class.isAssignableFrom(_class)){
|
||||
return (IAlgorithm)_class.newInstance();
|
||||
} else {
|
||||
log.warn("found algorothm class {} is no assignable from {}",_class.getName(), IAlgorithm.class.getName());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
throw new Exception(String.format("Algorithm with id %s not found", identifier));
|
||||
}
|
||||
|
||||
public Set<Class<?>> getAllAlgorithms() {
|
||||
return reflection.getTypesAnnotatedWith(Algorithm.class);
|
||||
}
|
||||
|
||||
private synchronized void updateRepository(){
|
||||
if ((System.currentTimeMillis()-lastUpdate)>UPDATE_TIME_IN_MILLIS){
|
||||
log.info("update time passed, updating repository");
|
||||
String packageToFind = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses";
|
||||
ConfigurationBuilder confBuilder = new ConfigurationBuilder()
|
||||
.filterInputsBy(new FilterBuilder().include(FilterBuilder.prefix(packageToFind)))
|
||||
.setUrls(ClasspathHelper.forClassLoader());
|
||||
reflection = new Reflections(confBuilder);
|
||||
lastUpdate = System.currentTimeMillis();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,113 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Set;
|
||||
|
||||
import net.opengis.wps.x100.ProcessDescriptionType;
|
||||
|
||||
import org.n52.wps.algorithm.annotation.Algorithm;
|
||||
import org.n52.wps.server.IAlgorithm;
|
||||
import org.n52.wps.server.IAlgorithmRepository;
|
||||
import org.reflections.Reflections;
|
||||
import org.reflections.util.ClasspathHelper;
|
||||
import org.reflections.util.ConfigurationBuilder;
|
||||
import org.reflections.util.FilterBuilder;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class GcubeAlgorithmRepository implements IAlgorithmRepository {
|
||||
|
||||
private static long UPDATE_TIME_IN_MILLIS = 60000;
|
||||
|
||||
private static Logger log = LoggerFactory.getLogger(GcubeAlgorithmRepository.class);
|
||||
|
||||
//private static GcubeAlgorithmRepository instance= new GcubeAlgorithmRepository();
|
||||
|
||||
private static Long lastUpdate = 0l;
|
||||
|
||||
private static Reflections reflection;
|
||||
|
||||
|
||||
public GcubeAlgorithmRepository(){
|
||||
updateRepository();
|
||||
}
|
||||
|
||||
public ProcessDescriptionType getProcessDescription(String identifier){
|
||||
updateRepository();
|
||||
log.info("getProcessDescription with identifier {} ",identifier);
|
||||
try{
|
||||
Set<Class<?>> classes = reflection.getTypesAnnotatedWith(Algorithm.class);
|
||||
for (Class<?> _class: classes){
|
||||
if (_class.getAnnotation(Algorithm.class).identifier().equals(identifier)){
|
||||
return ((IAlgorithm)_class.newInstance()).getDescription();
|
||||
}
|
||||
}
|
||||
}catch(Exception e){}
|
||||
throw new RuntimeException(String.format("Algorithm with process id %s not found", identifier));
|
||||
}
|
||||
|
||||
public boolean containsAlgorithm(String identifier) {
|
||||
updateRepository();
|
||||
log.info("containsAlgorithm with identifier {} ",identifier);
|
||||
Set<Class<?>> classes = reflection.getTypesAnnotatedWith(Algorithm.class);
|
||||
for (Class<?> _class: classes){
|
||||
if (_class.getAnnotation(Algorithm.class).identifier().equals(identifier)){
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public IAlgorithm getAlgorithm(String identifier){
|
||||
updateRepository();
|
||||
log.info("getAlgorithm with identifier {} ",identifier);
|
||||
try{
|
||||
Set<Class<?>> classes = reflection.getTypesAnnotatedWith(Algorithm.class);
|
||||
for (Class<?> _class: classes){
|
||||
if (_class.getAnnotation(Algorithm.class).identifier().equals(identifier)){
|
||||
if (IAlgorithm.class.isAssignableFrom(_class)){
|
||||
return (IAlgorithm)_class.newInstance();
|
||||
} else {
|
||||
log.warn("found algorothm class {} is no assignable from {}",_class.getName(), IAlgorithm.class.getName());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}catch(Exception e){}
|
||||
throw new RuntimeException(String.format("Algorithm with id %s not found", identifier));
|
||||
}
|
||||
|
||||
public static Set<Class<?>> getAllAlgorithms() {
|
||||
updateRepository();
|
||||
return reflection.getTypesAnnotatedWith(Algorithm.class);
|
||||
}
|
||||
|
||||
private static synchronized void updateRepository(){
|
||||
if ((System.currentTimeMillis()-lastUpdate)>UPDATE_TIME_IN_MILLIS){
|
||||
log.info("update time passed, updating repository");
|
||||
String packageToFind = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses";
|
||||
ConfigurationBuilder confBuilder = new ConfigurationBuilder()
|
||||
.filterInputsBy(new FilterBuilder().include(FilterBuilder.prefix(packageToFind)))
|
||||
.setUrls(ClasspathHelper.forClassLoader());
|
||||
reflection = new Reflections(confBuilder);
|
||||
lastUpdate = System.currentTimeMillis();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<String> getAlgorithmNames() {
|
||||
updateRepository();
|
||||
Collection<String> toReturn = new ArrayList<String>();
|
||||
Set<Class<?>> classes = reflection.getTypesAnnotatedWith(Algorithm.class);
|
||||
for (Class<?> _class: classes){
|
||||
toReturn.add(_class.getAnnotation(Algorithm.class).title());
|
||||
}
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
|
||||
}
|
||||
}
|
|
@ -1,849 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.web;
|
||||
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.math.BigInteger;
|
||||
import java.net.URLDecoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
|
||||
import net.opengis.ows.x11.BoundingBoxType;
|
||||
import net.opengis.ows.x11.ExceptionType;
|
||||
import net.opengis.wps.x100.ComplexDataType;
|
||||
import net.opengis.wps.x100.DataInputsType;
|
||||
import net.opengis.wps.x100.DocumentOutputDefinitionType;
|
||||
import net.opengis.wps.x100.ExecuteDocument;
|
||||
import net.opengis.wps.x100.ExecuteDocument.Execute;
|
||||
import net.opengis.wps.x100.InputDescriptionType;
|
||||
import net.opengis.wps.x100.InputReferenceType;
|
||||
import net.opengis.wps.x100.InputType;
|
||||
import net.opengis.wps.x100.LiteralDataType;
|
||||
import net.opengis.wps.x100.OutputDefinitionType;
|
||||
import net.opengis.wps.x100.OutputDescriptionType;
|
||||
import net.opengis.wps.x100.ProcessDescriptionType;
|
||||
import net.opengis.wps.x100.ResponseDocumentType;
|
||||
import net.opengis.wps.x100.ResponseFormType;
|
||||
import net.opengis.wps.x100.StatusType;
|
||||
|
||||
import org.apache.commons.collections.map.CaseInsensitiveMap;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlException;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.apache.xmlbeans.XmlOptions;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.AlgorithmManager;
|
||||
import org.n52.wps.commons.context.ExecutionContext;
|
||||
import org.n52.wps.commons.context.ExecutionContextFactory;
|
||||
import org.n52.wps.io.data.IComplexData;
|
||||
import org.n52.wps.io.data.IData;
|
||||
import org.n52.wps.server.AbstractTransactionalAlgorithm;
|
||||
import org.n52.wps.server.ExceptionReport;
|
||||
import org.n52.wps.server.IAlgorithm;
|
||||
import org.n52.wps.server.database.DatabaseFactory;
|
||||
import org.n52.wps.server.observerpattern.IObserver;
|
||||
import org.n52.wps.server.observerpattern.ISubject;
|
||||
import org.n52.wps.server.request.InputHandler;
|
||||
import org.n52.wps.server.request.Request;
|
||||
import org.n52.wps.server.response.Response;
|
||||
import org.n52.wps.util.XMLBeansHelper;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Node;
|
||||
|
||||
/**
|
||||
* Handles an ExecuteRequest
|
||||
*/
|
||||
public class ExecuteRequest extends Request implements IObserver {
|
||||
|
||||
private ExecuteDocument execDom;
|
||||
private Map<String, IData> returnResults;
|
||||
private ExecuteResponseBuilder execRespType;
|
||||
|
||||
/**
|
||||
* Creates an ExecuteRequest based on a Document (HTTP_POST)
|
||||
*
|
||||
* @param doc
|
||||
* The clients submission
|
||||
* @throws ExceptionReport
|
||||
*/
|
||||
public ExecuteRequest(Document doc) throws ExceptionReport {
|
||||
super(doc);
|
||||
initWpsID();
|
||||
try {
|
||||
LOGGER.debug("Preparing the ExecuteRequest for POST");
|
||||
XmlOptions option = new XmlOptions();
|
||||
option.setLoadTrimTextBuffer();
|
||||
LOGGER.debug("Parsing document");
|
||||
this.execDom = ExecuteDocument.Factory.parse(doc, option);
|
||||
if (this.execDom == null) {
|
||||
LOGGER.error("ExecuteDocument is null");
|
||||
throw new ExceptionReport("Error while parsing post data", ExceptionReport.MISSING_PARAMETER_VALUE);
|
||||
}
|
||||
} catch (XmlException e) {
|
||||
LOGGER.error("EXCEPTION Error while parsing post data: {} ",ExceptionReport.MISSING_PARAMETER_VALUE,e);
|
||||
throw new ExceptionReport("Error while parsing post data", ExceptionReport.MISSING_PARAMETER_VALUE, e);
|
||||
}
|
||||
|
||||
LOGGER.debug("Validating document");
|
||||
// validate the client input
|
||||
// validate();
|
||||
LOGGER.debug("Document OK");
|
||||
// create an initial response
|
||||
execRespType = new ExecuteResponseBuilder(this);
|
||||
LOGGER.debug("Response Builder Ready");
|
||||
storeRequest(execDom);
|
||||
LOGGER.debug("Request Stored");
|
||||
}
|
||||
|
||||
/*
|
||||
* Creates an ExecuteRequest based on a Map (HTTP_GET). NOTE: Parameters are
|
||||
* treated as non case sensitive. @param ciMap The client input @throws
|
||||
* ExceptionReport
|
||||
*/
|
||||
public ExecuteRequest(CaseInsensitiveMap ciMap) throws ExceptionReport {
|
||||
super(ciMap);
|
||||
initWpsID();
|
||||
initForGET(ciMap);
|
||||
// validate the client input
|
||||
validate();
|
||||
|
||||
// create an initial response
|
||||
execRespType = new ExecuteResponseBuilder(this);
|
||||
|
||||
storeRequest(ciMap);
|
||||
}
|
||||
|
||||
public void getKVPDataInputs() {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* @param ciMap
|
||||
*/
|
||||
private void initForGET(CaseInsensitiveMap ciMap) throws ExceptionReport {
|
||||
String version = getMapValue("version", ciMap, true);
|
||||
if (!version.equals(Request.SUPPORTED_VERSION)) {
|
||||
throw new ExceptionReport("request version is not supported: " + version, ExceptionReport.VERSION_NEGOTIATION_FAILED);
|
||||
}
|
||||
this.execDom = ExecuteDocument.Factory.newInstance();
|
||||
Execute execute = execDom.addNewExecute();
|
||||
String processID = getMapValue("Identifier", true);
|
||||
if (!AlgorithmManager.getInstance().containsAlgorithm(processID)) {
|
||||
throw new ExceptionReport("Process does not exist", ExceptionReport.INVALID_PARAMETER_VALUE);
|
||||
} else {
|
||||
LOGGER.info("AlgorithmManager contains algorithm with processId {} ",processID);
|
||||
}
|
||||
execute.addNewIdentifier().setStringValue(processID);
|
||||
DataInputsType dataInputs = execute.addNewDataInputs();
|
||||
String dataInputString = getMapValue("DataInputs", true);
|
||||
dataInputString = dataInputString.replace("&", "&");
|
||||
String[] inputs = dataInputString.split(";");
|
||||
|
||||
// Handle data inputs
|
||||
for (String inputString : inputs) {
|
||||
int position = inputString.indexOf("=");
|
||||
if (position == -1) {
|
||||
throw new ExceptionReport("No \"=\" supplied for attribute: " + inputString, ExceptionReport.MISSING_PARAMETER_VALUE);
|
||||
}
|
||||
// get name
|
||||
String key = inputString.substring(0, position);
|
||||
String value = null;
|
||||
if (key.length() + 1 < inputString.length()) {
|
||||
// BS int valueDelimiter = inputString.indexOf("@");
|
||||
int valueDelimiter = inputString.indexOf("@");
|
||||
if (valueDelimiter != -1 && position + 1 < valueDelimiter) {
|
||||
value = inputString.substring(position + 1, valueDelimiter);
|
||||
} else {
|
||||
value = inputString.substring(position + 1);
|
||||
}
|
||||
}
|
||||
ProcessDescriptionType description;
|
||||
|
||||
try{
|
||||
description = AlgorithmManager.getInstance().getProcessDescription(processID);
|
||||
}catch(Exception e){
|
||||
LOGGER.error("error retrieving Process description type",e);
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
if (description == null) {
|
||||
throw new ExceptionReport("Data Identifier not supported: " + key, ExceptionReport.MISSING_PARAMETER_VALUE);
|
||||
}
|
||||
InputDescriptionType inputDesc = XMLBeansHelper.findInputByID(key, description.getDataInputs());
|
||||
if (inputDesc == null) {
|
||||
throw new ExceptionReport("Data Identifier not supported: " + key, ExceptionReport.MISSING_PARAMETER_VALUE);
|
||||
}
|
||||
InputType input = dataInputs.addNewInput();
|
||||
input.addNewIdentifier().setStringValue(key);
|
||||
// prepare attributes
|
||||
String encodingAttribute = null;
|
||||
String mimeTypeAttribute = null;
|
||||
String schemaAttribute = null;
|
||||
String hrefAttribute = null;
|
||||
String uom = null;
|
||||
String dataType = null;
|
||||
String[] inputItemstemp = inputString.split("@");
|
||||
String[] inputItems = null;
|
||||
if (inputItemstemp.length == 2) {
|
||||
inputItems = inputItemstemp[1].split("@");
|
||||
} else {
|
||||
inputItems = inputString.split("@");
|
||||
}
|
||||
if (inputItemstemp.length > 1) {
|
||||
for (int i = 0; i < inputItems.length; i++) {
|
||||
int attributePos = inputItems[i].indexOf("=");
|
||||
if (attributePos == -1 || attributePos + 1 >= inputItems[i].length()) {
|
||||
continue;
|
||||
}
|
||||
String attributeName = inputItems[i].substring(0, attributePos);
|
||||
String attributeValue = inputItems[i].substring(attributePos + 1);
|
||||
// attribute is input name
|
||||
if (attributeName.equals(key)) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
attributeValue = URLDecoder.decode(attributeValue, "UTF-8");
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
throw new ExceptionReport("Something went wrong while trying to decode value of " + attributeName, ExceptionReport.NO_APPLICABLE_CODE, e);
|
||||
}
|
||||
if (attributeName.equalsIgnoreCase("encoding")) {
|
||||
encodingAttribute = attributeValue;
|
||||
} else if (attributeName.equalsIgnoreCase("mimeType")) {
|
||||
mimeTypeAttribute = attributeValue;
|
||||
} else if (attributeName.equalsIgnoreCase("schema")) {
|
||||
schemaAttribute = attributeValue;
|
||||
} else if (attributeName.equalsIgnoreCase("href") | attributeName.equalsIgnoreCase("xlink:href")) {
|
||||
hrefAttribute = attributeValue;
|
||||
} else if (attributeName.equalsIgnoreCase("uom")) {
|
||||
uom = attributeValue;
|
||||
} else if (attributeName.equalsIgnoreCase("datatype")) {
|
||||
dataType = attributeValue;
|
||||
} else {
|
||||
throw new ExceptionReport("Attribute is not supported: " + attributeName, ExceptionReport.INVALID_PARAMETER_VALUE);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
if (inputDesc.isSetComplexData()) {
|
||||
// TODO: check for different attributes
|
||||
// handling ComplexReference
|
||||
if (!(hrefAttribute == null) && !hrefAttribute.equals("")) {
|
||||
InputReferenceType reference = input.addNewReference();
|
||||
reference.setHref(hrefAttribute);
|
||||
if (schemaAttribute != null) {
|
||||
reference.setSchema(schemaAttribute);
|
||||
}
|
||||
if (mimeTypeAttribute != null) {
|
||||
reference.setMimeType(mimeTypeAttribute);
|
||||
}
|
||||
if (encodingAttribute != null) {
|
||||
reference.setEncoding(encodingAttribute);
|
||||
}
|
||||
|
||||
}
|
||||
// Handling ComplexData
|
||||
else {
|
||||
ComplexDataType data = input.addNewData().addNewComplexData();
|
||||
|
||||
InputStream stream = new ByteArrayInputStream(value.getBytes());
|
||||
|
||||
try {
|
||||
data.set(XmlObject.Factory.parse(stream));
|
||||
} catch (Exception e) {
|
||||
LOGGER.warn("Could not parse value: " + value + " as XMLObject. Trying to create text node.");
|
||||
try {
|
||||
Node textNode = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument().createTextNode(value);
|
||||
data.set(XmlObject.Factory.parse(textNode));
|
||||
} catch (Exception e1) {
|
||||
throw new ExceptionReport("Exception while trying to parse value: " + value, ExceptionReport.NO_APPLICABLE_CODE, e1);
|
||||
}
|
||||
}
|
||||
|
||||
if (schemaAttribute != null) {
|
||||
data.setSchema(schemaAttribute);
|
||||
}
|
||||
if (mimeTypeAttribute != null) {
|
||||
data.setMimeType(mimeTypeAttribute);
|
||||
}
|
||||
if (encodingAttribute != null) {
|
||||
data.setEncoding(encodingAttribute);
|
||||
}
|
||||
}
|
||||
|
||||
} else if (inputDesc.isSetLiteralData()) {
|
||||
LiteralDataType data = input.addNewData().addNewLiteralData();
|
||||
if (value == null) {
|
||||
throw new ExceptionReport("No value provided for literal: " + inputDesc.getIdentifier().getStringValue(), ExceptionReport.MISSING_PARAMETER_VALUE);
|
||||
}
|
||||
data.setStringValue(value);
|
||||
if (uom != null) {
|
||||
data.setUom(uom);
|
||||
}
|
||||
if (dataType != null) {
|
||||
data.setDataType(dataType);
|
||||
}
|
||||
} else if (inputDesc.isSetBoundingBoxData()) {
|
||||
BoundingBoxType data = input.addNewData().addNewBoundingBoxData();
|
||||
String[] values = value.split(",");
|
||||
|
||||
if (values.length < 4) {
|
||||
throw new ExceptionReport("Invalid Number of BBOX Values: " + inputDesc.getIdentifier().getStringValue(), ExceptionReport.MISSING_PARAMETER_VALUE);
|
||||
}
|
||||
List<String> lowerCorner = new ArrayList<String>();
|
||||
lowerCorner.add(values[0]);
|
||||
lowerCorner.add(values[1]);
|
||||
data.setLowerCorner(lowerCorner);
|
||||
|
||||
List<String> upperCorner = new ArrayList<String>();
|
||||
upperCorner.add(values[2]);
|
||||
upperCorner.add(values[3]);
|
||||
data.setUpperCorner(upperCorner);
|
||||
|
||||
if (values.length > 4) {
|
||||
data.setCrs(values[4]);
|
||||
}
|
||||
|
||||
if (values.length > 5) {
|
||||
data.setDimensions(BigInteger.valueOf(Long.valueOf(values[5])));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
// retrieve status
|
||||
boolean status = false;
|
||||
String statusString = getMapValue("status", false);
|
||||
LOGGER.debug("Incoming Status Request: "+statusString);
|
||||
if (statusString != null) {
|
||||
status = Boolean.parseBoolean(statusString);
|
||||
}
|
||||
boolean store = false;
|
||||
String storeString = getMapValue("storeExecuteResponse", false);
|
||||
LOGGER.debug("Incoming storeExecuteResponse Request: "+storeString);
|
||||
if (storeString != null) {
|
||||
store = Boolean.parseBoolean(storeString);
|
||||
}
|
||||
// Handle ResponseDocument option
|
||||
String responseDocument = getMapValue("ResponseDocument", false);
|
||||
if (responseDocument != null) {
|
||||
String[] outputs = responseDocument.split(";");
|
||||
ResponseDocumentType responseDoc = execute.addNewResponseForm().addNewResponseDocument();
|
||||
responseDoc.setStatus(status);
|
||||
responseDoc.setStoreExecuteResponse(store);
|
||||
for (String outputID : outputs) {
|
||||
String[] outputDataparameters = outputID.split("@");
|
||||
String outputDataInput = "";
|
||||
if (outputDataparameters.length > 0) {
|
||||
outputDataInput = outputDataparameters[0];
|
||||
} else {
|
||||
outputDataInput = outputID;
|
||||
}
|
||||
outputDataInput = outputDataInput.replace("=", "");
|
||||
ProcessDescriptionType description;
|
||||
try{
|
||||
description = AlgorithmManager.getInstance().getProcessDescription(processID);
|
||||
}catch(Exception e){
|
||||
//LOGGER.error("Data output Identifier not supported: " + outputDataInput);
|
||||
throw new ExceptionReport("Data output Identifier not supported: " + outputDataInput, ExceptionReport.MISSING_PARAMETER_VALUE);
|
||||
}
|
||||
OutputDescriptionType outputDesc = XMLBeansHelper.findOutputByID(outputDataInput, description.getProcessOutputs().getOutputArray());
|
||||
|
||||
DocumentOutputDefinitionType output = responseDoc.addNewOutput();
|
||||
output.addNewIdentifier().setStringValue(outputDataInput);
|
||||
|
||||
for (int i = 1; i < outputDataparameters.length; i++) {
|
||||
int attributePos = outputDataparameters[i].indexOf("=");
|
||||
if (attributePos == -1 || attributePos + 1 >= outputDataparameters[i].length()) {
|
||||
continue;
|
||||
}
|
||||
String attributeName = outputDataparameters[i].substring(0, attributePos);
|
||||
String attributeValue = outputDataparameters[i].substring(attributePos + 1);
|
||||
try {
|
||||
attributeValue = URLDecoder.decode(attributeValue, "UTF-8");
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
throw new ExceptionReport("Something went wrong while trying to decode value of " + attributeName, ExceptionReport.NO_APPLICABLE_CODE, e);
|
||||
}
|
||||
if (attributeName.equalsIgnoreCase("mimeType")) {
|
||||
output.setMimeType(attributeValue);
|
||||
} else if (attributeName.equalsIgnoreCase("schema")) {
|
||||
output.setSchema(attributeValue);
|
||||
} else if (attributeName.equalsIgnoreCase("encoding")) {
|
||||
output.setEncoding(attributeValue);
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
String rawData = getMapValue("RawDataOutput", false);
|
||||
if (rawData != null) {
|
||||
String[] rawDataparameters = rawData.split("@");
|
||||
String rawDataInput = "";
|
||||
if (rawDataparameters.length > 0) {
|
||||
rawDataInput = rawDataparameters[0];
|
||||
} else {
|
||||
rawDataInput = rawData;
|
||||
}
|
||||
ProcessDescriptionType description;
|
||||
try{
|
||||
description = AlgorithmManager.getInstance().getProcessDescription(processID);
|
||||
}catch(Exception e){
|
||||
throw new ExceptionReport("Data output Identifier not supported: " + rawData, ExceptionReport.MISSING_PARAMETER_VALUE);
|
||||
}
|
||||
OutputDescriptionType outputDesc = XMLBeansHelper.findOutputByID(rawDataInput, description.getProcessOutputs().getOutputArray());
|
||||
|
||||
ResponseFormType responseForm = execute.addNewResponseForm();
|
||||
OutputDefinitionType output = responseForm.addNewRawDataOutput();
|
||||
output.addNewIdentifier().setStringValue(outputDesc.getIdentifier().getStringValue());
|
||||
|
||||
if (rawDataparameters.length > 0) {
|
||||
for (int i = 0; i < rawDataparameters.length; i++) {
|
||||
int attributePos = rawDataparameters[i].indexOf("=");
|
||||
if (attributePos == -1 || attributePos + 1 >= rawDataparameters[i].length()) {
|
||||
continue;
|
||||
}
|
||||
String attributeName = rawDataparameters[i].substring(0, attributePos);
|
||||
String attributeValue = rawDataparameters[i].substring(attributePos + 1);
|
||||
try {
|
||||
attributeValue = URLDecoder.decode(attributeValue, "UTF-8");
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
throw new ExceptionReport("Something went wrong while trying to decode value of " + attributeName, ExceptionReport.NO_APPLICABLE_CODE, e);
|
||||
}
|
||||
if (attributeName.equalsIgnoreCase("mimeType")) {
|
||||
output.setMimeType(attributeValue);
|
||||
} else if (attributeName.equalsIgnoreCase("schema")) {
|
||||
output.setSchema(attributeValue);
|
||||
} else if (attributeName.equalsIgnoreCase("encoding")) {
|
||||
output.setEncoding(attributeValue);
|
||||
|
||||
} else {
|
||||
throw new ExceptionReport("Attribute is not supported: " + attributeName, ExceptionReport.INVALID_PARAMETER_VALUE);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates the client request
|
||||
*
|
||||
* @return True if the input is valid, False otherwise
|
||||
*/
|
||||
public boolean validate() throws ExceptionReport {
|
||||
// Identifier must be specified.
|
||||
/*
|
||||
* Only for HTTP_GET: String identifier = getMapValue("identifier");
|
||||
*
|
||||
* try{ // Specifies if all complex valued output(s) of this process
|
||||
* should be stored by process // as web-accessible resources store =
|
||||
* getMapValue("store").equals("true"); // Specifies if Execute
|
||||
* operation response shall be returned quickly with status information
|
||||
* status = getMapValue("status").equals("true"); }catch(ExceptionReport
|
||||
* e){ // if parameters "store" or "status" are not included, they
|
||||
* default to false; } // just testing if the number of arguments is
|
||||
* even... String[] diArray = getMapValue("DataInputs").split(",");
|
||||
* if(diArray.length % 2 != 0) { throw new ExceptionReport("Incorrect
|
||||
* number of arguments for parameter dataInputs, please only a even
|
||||
* number of parameter values",
|
||||
* ExceptionReport.INVALID_PARAMETER_VALUE); }
|
||||
*/
|
||||
if (!execDom.getExecute().getVersion().equals(SUPPORTED_VERSION)) {
|
||||
throw new ExceptionReport("Specified version is not supported.", ExceptionReport.INVALID_PARAMETER_VALUE, "version=" + getExecute().getVersion());
|
||||
}
|
||||
|
||||
// Fix for bug https://bugzilla.52north.org/show_bug.cgi?id=906
|
||||
String identifier = getAlgorithmIdentifier();
|
||||
|
||||
if (identifier == null) {
|
||||
throw new ExceptionReport("No process identifier supplied.", ExceptionReport.MISSING_PARAMETER_VALUE, "identifier");
|
||||
}
|
||||
|
||||
// check if the algorithm is in our repository
|
||||
if (!AlgorithmManager.getInstance().containsAlgorithm(identifier)) {
|
||||
throw new ExceptionReport("Specified process identifier does not exist", ExceptionReport.INVALID_PARAMETER_VALUE, "identifier=" + identifier);
|
||||
}
|
||||
|
||||
// validate if the process can be executed
|
||||
ProcessDescriptionType desc = null;
|
||||
try{
|
||||
desc = AlgorithmManager.getInstance().getProcessDescription(getAlgorithmIdentifier());
|
||||
}catch (Exception e) {}
|
||||
// We need a description of the inputs for the algorithm
|
||||
if (desc == null) {
|
||||
LOGGER.warn("desc == null");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Get the inputdescriptions of the algorithm
|
||||
|
||||
if (desc.getDataInputs() != null) {
|
||||
InputDescriptionType[] inputDescs = desc.getDataInputs().getInputArray();
|
||||
|
||||
// prevent NullPointerException for zero input values in execute
|
||||
// request (if only default values are used)
|
||||
InputType[] inputs;
|
||||
if (getExecute().getDataInputs() == null)
|
||||
inputs = new InputType[0];
|
||||
else
|
||||
inputs = getExecute().getDataInputs().getInputArray();
|
||||
|
||||
// For each input supplied by the client
|
||||
for (InputType input : inputs) {
|
||||
boolean identifierMatched = false;
|
||||
// Try to match the input with one of the descriptions
|
||||
for (InputDescriptionType inputDesc : inputDescs) {
|
||||
// If found, then process:
|
||||
if (inputDesc.getIdentifier().getStringValue().equals(input.getIdentifier().getStringValue())) {
|
||||
identifierMatched = true;
|
||||
// If it is a literal value,
|
||||
if (input.getData() != null && input.getData().getLiteralData() != null) {
|
||||
// then check if the desription is also of type
|
||||
// literal
|
||||
if (inputDesc.getLiteralData() == null) {
|
||||
throw new ExceptionReport("Inputtype LiteralData is not supported", ExceptionReport.INVALID_PARAMETER_VALUE);
|
||||
}
|
||||
// literalValue.getDataType ist optional
|
||||
if (input.getData().getLiteralData().getDataType() != null) {
|
||||
if (inputDesc.getLiteralData() != null)
|
||||
if (inputDesc.getLiteralData().getDataType() != null)
|
||||
if (inputDesc.getLiteralData().getDataType().getReference() != null)
|
||||
if (!input.getData().getLiteralData().getDataType().equals(inputDesc.getLiteralData().getDataType().getReference())) {
|
||||
throw new ExceptionReport("Specified dataType is not supported " + input.getData().getLiteralData().getDataType() + " for input " + input.getIdentifier().getStringValue(), ExceptionReport.INVALID_PARAMETER_VALUE);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Excluded, because ProcessDescription validation
|
||||
// should be
|
||||
// done on startup!
|
||||
// else if (input.getComplexValue() != null) {
|
||||
// if(ParserFactory.getInstance().getParser(input.getComplexValue().getSchema())
|
||||
// == null) {
|
||||
// LOGGER.warn("Request validation message: schema
|
||||
// attribute
|
||||
// null, so the simple one will be used!");
|
||||
// }
|
||||
// }
|
||||
// else if (input.getComplexValueReference() != null) {
|
||||
// // we found a complexvalue input, try to get the
|
||||
// parser.
|
||||
// if(ParserFactory.getInstance().getParser(input.getComplexValueReference().getSchema())
|
||||
// == null) {
|
||||
// LOGGER.warn("Request validation message: schema
|
||||
// attribute
|
||||
// null, so the simple one will be used!");
|
||||
// }
|
||||
// }
|
||||
break;
|
||||
}
|
||||
}
|
||||
// if the identifier did not match one of the descriptions, it
|
||||
// is
|
||||
// invalid
|
||||
if (!identifierMatched) {
|
||||
throw new ExceptionReport("Input Identifier is not valid: " + input.getIdentifier().getStringValue(), ExceptionReport.INVALID_PARAMETER_VALUE, "input identifier");
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Actually serves the Request.
|
||||
*
|
||||
* @throws ExceptionReport
|
||||
*/
|
||||
	// External identifier of this WPS execution; used as the key when storing
	// requests/responses through DatabaseFactory (see storeRequest/updateStatus).
	String wpsid = null;

	// Initializes wpsid from this request's unique id. NOTE(review): must be
	// called before any method that persists by wpsid — confirm the call site.
	private void initWpsID(){
		wpsid = getUniqueId().toString();
	}
|
||||
|
||||
	/**
	 * Actually serves the Request: sets up the execution context, parses the
	 * inputs, runs the requested algorithm, reports status (started/success/
	 * error) and finally builds the {@code ExecuteResponse}.
	 *
	 * @return the response built from this request's results
	 * @throws ExceptionReport if input parsing or algorithm execution fails
	 */
	public Response call() throws ExceptionReport {
		IAlgorithm algorithm = null;
		Map<String, List<IData>> inputMap = null;
		try {
			// Build the execution context from the requested response form:
			// raw output, an explicit output list, or the default context.
			ExecutionContext context;
			if (getExecute().isSetResponseForm()) {
				context = getExecute().getResponseForm().isSetRawDataOutput() ? new ExecutionContext(getExecute().getResponseForm().getRawDataOutput()) : new ExecutionContext(Arrays.asList(getExecute().getResponseForm().getResponseDocument().getOutputArray()));
			} else {
				context = new ExecutionContext();
			}

			// register so that any function that calls
			// ExecuteContextFactory.getContext() gets the instance registered
			// with this thread
			ExecutionContextFactory.registerContext(context);

			LOGGER.debug("started with execution");

			updateStatusStarted();

			// parse the input (empty array when the request carries no inputs)
			InputType[] inputs = new InputType[0];
			if (getExecute().getDataInputs() != null) {
				inputs = getExecute().getDataInputs().getInputArray();
			}
			InputHandler parser = new InputHandler.Builder(inputs, getAlgorithmIdentifier()).build();

			// we got so far:
			// get the algorithm, and run it with the clients input
			algorithm = AlgorithmManager.getInstance().getAlgorithm(getAlgorithmIdentifier());

			// Observe algorithm progress so update(ISubject) can relay it.
			if (algorithm instanceof ISubject) {
				ISubject subject = (ISubject) algorithm;
				subject.addObserver(this);

			}
			// Propagate our external id to gCube ecological-engine algorithms.
			if (algorithm instanceof AbstractEcologicalEngineMapper) {
				((AbstractEcologicalEngineMapper) algorithm).setWpsExternalID(wpsid);
			}

			// Transactional algorithms receive the raw Execute document;
			// all others receive the parsed input map.
			if (algorithm instanceof AbstractTransactionalAlgorithm) {
				returnResults = ((AbstractTransactionalAlgorithm) algorithm).run(execDom);
			} else {
				inputMap = parser.getParsedInputData();
				returnResults = algorithm.run(inputMap);
			}

			// Only the first reported error is surfaced in the status.
			List<String> errorList = algorithm.getErrors();
			if (errorList != null && !errorList.isEmpty()) {
				String errorMessage = errorList.get(0);
				LOGGER.error("Error reported while handling ExecuteRequest for " + getAlgorithmIdentifier() + ": " + errorMessage);
				updateStatusError(errorMessage);
			} else {
				updateStatusSuccess();
			}
		} catch (Throwable e) {
			// Prefer the algorithm's own error message over the exception text.
			String errorMessage = null;
			if (algorithm != null && algorithm.getErrors() != null && !algorithm.getErrors().isEmpty()) {
				errorMessage = algorithm.getErrors().get(0);
			}
			if (errorMessage == null) {
				errorMessage = e.toString();
			}
			if (errorMessage == null) {
				errorMessage = "UNKNOWN ERROR";
			}
			LOGGER.error("Exception/Error while executing ExecuteRequest for " + getAlgorithmIdentifier() + ": " + errorMessage);
			updateStatusError(errorMessage);
			if (e instanceof Error) {
				// This is required when catching Error
				throw (Error) e;
			}
			if (e instanceof ExceptionReport) {
				throw (ExceptionReport) e;
			} else {
				throw new ExceptionReport("Error while executing the embedded process for: " + getAlgorithmIdentifier(), ExceptionReport.NO_APPLICABLE_CODE, e);
			}
		} finally {
			// you ***MUST*** call this or else you will have a PermGen
			// ClassLoader memory leak due to ThreadLocal use
			ExecutionContextFactory.unregisterContext();
			if (algorithm instanceof ISubject) {
				((ISubject) algorithm).removeObserver(this);
			}
			// Release complex data held by inputs and results.
			if (inputMap != null) {
				for (List<IData> l : inputMap.values()) {
					for (IData d : l) {
						if (d instanceof IComplexData) {
							((IComplexData) d).dispose();
						}
					}
				}
			}
			if (returnResults != null) {
				for (IData d : returnResults.values()) {
					if (d instanceof IComplexData) {
						((IComplexData) d).dispose();
					}
				}
			}
		}

		ExecuteResponse response = new ExecuteResponse(this);
		return response;
	}
|
||||
|
||||
/**
|
||||
* Gets the identifier of the algorithm the client requested
|
||||
*
|
||||
* @return An identifier
|
||||
*/
|
||||
public String getAlgorithmIdentifier() {
|
||||
// Fix for bug https://bugzilla.52north.org/show_bug.cgi?id=906
|
||||
if (getExecute().getIdentifier() != null) {
|
||||
return getExecute().getIdentifier().getStringValue();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the Execute that is associated with this Request
|
||||
*
|
||||
* @return The Execute
|
||||
*/
|
||||
public Execute getExecute() {
|
||||
return execDom.getExecute();
|
||||
}
|
||||
|
||||
public Map<String, IData> getAttachedResult() {
|
||||
return returnResults;
|
||||
}
|
||||
|
||||
public boolean isStoreResponse() {
|
||||
if (execDom.getExecute().getResponseForm() == null) {
|
||||
return false;
|
||||
}
|
||||
if (execDom.getExecute().getResponseForm().getRawDataOutput() != null) {
|
||||
return false;
|
||||
}
|
||||
return execDom.getExecute().getResponseForm().getResponseDocument().getStoreExecuteResponse();
|
||||
}
|
||||
|
||||
public boolean isQuickStatus() {
|
||||
if (execDom.getExecute().getResponseForm() == null) {
|
||||
return false;
|
||||
}
|
||||
if (execDom.getExecute().getResponseForm().getRawDataOutput() != null) {
|
||||
return false;
|
||||
}
|
||||
return execDom.getExecute().getResponseForm().getResponseDocument().getStatus();
|
||||
}
|
||||
|
||||
public ExecuteResponseBuilder getExecuteResponseBuilder() {
|
||||
return this.execRespType;
|
||||
}
|
||||
|
||||
public boolean isRawData() {
|
||||
if (execDom.getExecute().getResponseForm() == null) {
|
||||
return false;
|
||||
}
|
||||
if (execDom.getExecute().getResponseForm().getRawDataOutput() != null) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public void update(ISubject subject) {
|
||||
Object state = subject.getState();
|
||||
LOGGER.info("Update received from Subject, state changed to : " + state);
|
||||
StatusType status = StatusType.Factory.newInstance();
|
||||
|
||||
int percentage = 0;
|
||||
if (state instanceof Integer) {
|
||||
percentage = (Integer) state;
|
||||
status.addNewProcessStarted().setPercentCompleted(percentage);
|
||||
} else if (state instanceof String) {
|
||||
status.addNewProcessStarted().setStringValue((String) state);
|
||||
}
|
||||
updateStatus(status);
|
||||
}
|
||||
|
||||
public void updateStatusAccepted() {
|
||||
StatusType status = StatusType.Factory.newInstance();
|
||||
status.setProcessAccepted("Process Accepted");
|
||||
updateStatus(status);
|
||||
}
|
||||
|
||||
public void updateStatusStarted() {
|
||||
StatusType status = StatusType.Factory.newInstance();
|
||||
status.addNewProcessStarted().setPercentCompleted(0);
|
||||
updateStatus(status);
|
||||
}
|
||||
|
||||
public void updateStatusSuccess() {
|
||||
StatusType status = StatusType.Factory.newInstance();
|
||||
status.setProcessSucceeded("Process successful");
|
||||
updateStatus(status);
|
||||
}
|
||||
|
||||
public void updateStatusError(String errorMessage) {
|
||||
StatusType status = StatusType.Factory.newInstance();
|
||||
net.opengis.ows.x11.ExceptionReportDocument.ExceptionReport excRep = status.addNewProcessFailed().addNewExceptionReport();
|
||||
excRep.setVersion("1.0.0");
|
||||
ExceptionType excType = excRep.addNewException();
|
||||
excType.addNewExceptionText().setStringValue(errorMessage);
|
||||
excType.setExceptionCode(ExceptionReport.NO_APPLICABLE_CODE);
|
||||
updateStatus(status);
|
||||
}
|
||||
|
||||
	/**
	 * Applies the given status to the response builder and, for stored
	 * responses, persists the current response document so clients can poll
	 * it via the statusLocation URL.
	 *
	 * @param status the new WPS status element
	 * @throws RuntimeException wrapping any ExceptionReport raised while
	 *         rebuilding or storing the response
	 */
	private void updateStatus(StatusType status) {
		getExecuteResponseBuilder().setStatus(status);
		try {
			getExecuteResponseBuilder().update();
			if (isStoreResponse()) {
				// Persist the (possibly intermediate) response under wpsid.
				ExecuteResponse executeResponse = new ExecuteResponse(this);
				InputStream is = null;
				try {
					is = executeResponse.getAsStream();
					DatabaseFactory.getDatabase().storeResponse(wpsid, is);
				} finally {
					IOUtils.closeQuietly(is);
				}
			}
		} catch (ExceptionReport e) {
			LOGGER.error("Update of process status failed.", e);
			throw new RuntimeException(e);
		}
	}
|
||||
|
||||
private void storeRequest(ExecuteDocument executeDocument) {
|
||||
InputStream is = null;
|
||||
try {
|
||||
is = executeDocument.newInputStream();
|
||||
DatabaseFactory.getDatabase().insertRequest(wpsid, is, true);
|
||||
//DatabaseFactory.getDatabase().insertRequest(wpsid, is, false);
|
||||
} catch (Exception e) {
|
||||
LOGGER.error("Exception storing ExecuteRequest", e);
|
||||
} finally {
|
||||
IOUtils.closeQuietly(is);
|
||||
}
|
||||
}
|
||||
|
||||
private void storeRequest(CaseInsensitiveMap map) {
|
||||
|
||||
BufferedWriter w = null;
|
||||
ByteArrayOutputStream os = null;
|
||||
ByteArrayInputStream is = null;
|
||||
try {
|
||||
os = new ByteArrayOutputStream();
|
||||
w = new BufferedWriter(new OutputStreamWriter(os));
|
||||
for (Object key : map.keySet()) {
|
||||
Object value = map.get(key);
|
||||
String valueString = "";
|
||||
if (value instanceof String[]) {
|
||||
valueString = ((String[]) value)[0];
|
||||
} else {
|
||||
valueString = value.toString();
|
||||
}
|
||||
w.append(key.toString()).append('=').append(valueString);
|
||||
w.newLine();
|
||||
}
|
||||
w.flush();
|
||||
is = new ByteArrayInputStream(os.toByteArray());
|
||||
DatabaseFactory.getDatabase().insertRequest(wpsid, is, false);
|
||||
} catch (Exception e) {
|
||||
LOGGER.error("Exception storing ExecuteRequest", e);
|
||||
} finally {
|
||||
IOUtils.closeQuietly(w);
|
||||
IOUtils.closeQuietly(os);
|
||||
IOUtils.closeQuietly(is);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -3,6 +3,8 @@ package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.web;
|
|||
import java.io.InputStream;
|
||||
|
||||
import org.n52.wps.server.ExceptionReport;
|
||||
import org.n52.wps.server.request.ExecuteRequest;
|
||||
import org.n52.wps.server.response.ExecuteResponseBuilder;
|
||||
import org.n52.wps.server.response.Response;
|
||||
|
||||
public class ExecuteResponse extends Response {
|
||||
|
|
|
@ -1,398 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.web;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.net.Inet4Address;
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.Calendar;
|
||||
|
||||
import javax.xml.XMLConstants;
|
||||
import javax.xml.namespace.QName;
|
||||
|
||||
import net.opengis.ows.x11.DomainMetadataType;
|
||||
import net.opengis.ows.x11.LanguageStringType;
|
||||
import net.opengis.wps.x100.DataInputsType;
|
||||
import net.opengis.wps.x100.DocumentOutputDefinitionType;
|
||||
import net.opengis.wps.x100.ExecuteResponseDocument;
|
||||
import net.opengis.wps.x100.ExecuteResponseDocument.ExecuteResponse;
|
||||
import net.opengis.wps.x100.OutputDefinitionType;
|
||||
import net.opengis.wps.x100.OutputDescriptionType;
|
||||
import net.opengis.wps.x100.ProcessDescriptionType;
|
||||
import net.opengis.wps.x100.StatusType;
|
||||
|
||||
import org.apache.xmlbeans.XmlCursor;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.AlgorithmManager;
|
||||
import org.n52.wps.commons.WPSConfig;
|
||||
import org.n52.wps.io.data.IBBOXData;
|
||||
import org.n52.wps.io.data.IData;
|
||||
import org.n52.wps.server.ExceptionReport;
|
||||
import org.n52.wps.server.WebProcessingService;
|
||||
import org.n52.wps.server.request.Request;
|
||||
import org.n52.wps.server.response.OutputDataItem;
|
||||
import org.n52.wps.server.response.RawData;
|
||||
import org.n52.wps.util.XMLBeansHelper;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* WPS Execute operation response. By default, this XML document is delivered to the client in response to an Execute request. If "status" is "false" in the Execute operation request, this document is normally returned when process execution has been completed.
|
||||
* If "status" in the Execute request is "true", this response shall be returned as soon as the Execute request has been accepted for processing. In this case, the same XML document is also made available as a web-accessible resource from the URL identified in the statusLocation, and the WPS server shall repopulate it once the process has completed. It may repopulate it on an ongoing basis while the process is executing.
|
||||
* However, the response to an Execute request will not include this element in the special case where the output is a single complex value result and the Execute request indicates that "store" is "false".
|
||||
* Instead, the server shall return the complex result (e.g., GIF image or GML) directly, without encoding it in the ExecuteResponse. If processing fails in this special case, the normal ExecuteResponse shall be sent, with the error condition indicated. This option is provided to simplify the programming required for simple clients and for service chaining.
|
||||
* @author Timon ter Braak
|
||||
*
|
||||
*/
|
||||
public class ExecuteResponseBuilder {
|
||||
|
||||
	private static final Logger logger = LoggerFactory.getLogger(ExecuteResponseBuilder.class);

	// Identifier of the process this response is being built for.
	private String identifier;
	// Inputs copied from the request when the server is configured to echo them.
	private DataInputsType dataInputs;
	//private DocumentOutputDefinitionType[] outputDefs;
	// The originating Execute request.
	private ExecuteRequest request;
	// The ExecuteResponse document being assembled.
	private ExecuteResponseDocument doc;
	// Set instead of populating 'doc' when the client asked for raw output.
	private RawData rawDataHandler = null;
	// Process description fetched from the AlgorithmManager at construction.
	private ProcessDescriptionType description;
	// Creation timestamp of this builder (and hence of the response).
	private Calendar creationTime;
	// Base URL of the WPS service endpoint.
	String webPath;
	// Base URL of the result-retrieval servlet (statusLocation target).
	String webStatus;
||||
|
||||
|
||||
|
||||
public ExecuteResponseBuilder(ExecuteRequest request) throws ExceptionReport{
|
||||
//System.out.println("Building Doc");
|
||||
this.request = request;
|
||||
doc = ExecuteResponseDocument.Factory.newInstance();
|
||||
doc.addNewExecuteResponse();
|
||||
XmlCursor c = doc.newCursor();
|
||||
c.toFirstChild();
|
||||
c.toLastAttribute();
|
||||
c.setAttributeText(new QName(XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, "schemaLocation"), "http://www.opengis.net/wps/1.0.0 http://schemas.opengis.net/wps/1.0.0/wpsExecute_response.xsd");
|
||||
|
||||
String webapp = WPSConfig.getInstance().getWPSConfig().getServer().getWebappPath();
|
||||
if (webapp == null)
|
||||
webapp = "wps";
|
||||
String host = WPSConfig.getInstance().getWPSConfig().getServer().getHostname();
|
||||
if (host.toLowerCase().equals("localhost"))
|
||||
try {
|
||||
host = Inet4Address.getLocalHost().getHostAddress();
|
||||
} catch (UnknownHostException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
String port = WPSConfig.getInstance().getWPSConfig().getServer().getHostport();
|
||||
logger.debug("Host: " + host + " Port: " + port + " Webapp: " + webapp + " ");
|
||||
webPath = "http://" + host + ":" + port + "/" + webapp + "/WebProcessingService";
|
||||
webStatus = "http://" + host + ":" + port + "/" + webapp + "/RetrieveResultServlet";
|
||||
|
||||
//statistical-manager-new.d4science.org:8080/wps/WebProcessingService?Request=GetCapabilities&Service=WPS
|
||||
|
||||
// doc.getExecuteResponse().setServiceInstance(webPath+"?REQUEST=GetCapabilities&SERVICE=WPS");
|
||||
doc.getExecuteResponse().setServiceInstance(webPath);
|
||||
doc.getExecuteResponse().setLang(WebProcessingService.DEFAULT_LANGUAGE);
|
||||
doc.getExecuteResponse().setService("WPS");
|
||||
doc.getExecuteResponse().setVersion(Request.SUPPORTED_VERSION);
|
||||
|
||||
|
||||
|
||||
this.identifier = request.getExecute().getIdentifier().getStringValue().trim();
|
||||
logger.debug("Identifier: "+identifier);
|
||||
ExecuteResponse responseElem = doc.getExecuteResponse();
|
||||
responseElem.addNewProcess().addNewIdentifier().setStringValue(identifier);
|
||||
|
||||
logger.debug("Getting description for {}",identifier);
|
||||
|
||||
try{
|
||||
description = AlgorithmManager.getInstance().getProcessDescription(this.identifier);
|
||||
}catch(Exception e){
|
||||
logger.error("Error while accessing the process description for "+ request.getExecute().getIdentifier().getStringValue(),e);
|
||||
throw new RuntimeException("Error while accessing the process description for "+ request.getExecute().getIdentifier().getStringValue(),e);
|
||||
}
|
||||
logger.debug("Description {} ",description);
|
||||
|
||||
responseElem.getProcess().setTitle(description.getTitle());
|
||||
responseElem.getProcess().setProcessVersion(description.getProcessVersion());
|
||||
creationTime = Calendar.getInstance();
|
||||
logger.debug("Execute Response Created!");
|
||||
}
|
||||
|
||||
	/**
	 * Synchronizes the response document with the current status. On success
	 * it populates the ProcessOutputs section: either from the client's
	 * response form (raw output or an explicit output list) or, as a
	 * spec-deviating fallback, from every output declared in the process
	 * description. For stored, not-yet-finished responses it only sets the
	 * statusLocation URL.
	 *
	 * @throws ExceptionReport when a requested output id or type is invalid
	 */
	public void update() throws ExceptionReport {
		// copying the request parameters to the response
		ExecuteResponse responseElem = doc.getExecuteResponse();

		// if status succeeded, update reponse with result
		if (responseElem.getStatus().isSetProcessSucceeded()) {
			// the response only includes dataInputs if the server property says so.
			// NOTE(review): new Boolean(...) is deprecated; Boolean.parseBoolean/
			// valueOf would be preferable — confirm the config getter's type.
			if(new Boolean(WPSConfig.getInstance().getWPSConfig().getServer().getIncludeDataInputsInResponse())){
				dataInputs = request.getExecute().getDataInputs();
				responseElem.setDataInputs(dataInputs);
			}
			responseElem.addNewProcessOutputs();
			// has the client specified the outputs?
			if (request.getExecute().isSetResponseForm()) {
				// Get the outputdescriptions from the algorithm
				OutputDescriptionType[] outputDescs = description.getProcessOutputs().getOutputArray();
				if(request.isRawData()) {
					// Raw output: generate exactly the one requested output
					// and return without touching the response document further.
					OutputDefinitionType rawDataOutput = request.getExecute().getResponseForm().getRawDataOutput();
					String id = rawDataOutput.getIdentifier().getStringValue();
					OutputDescriptionType desc = XMLBeansHelper.findOutputByID(id, outputDescs);
					if(desc.isSetComplexOutput()) {
						String encoding = ExecuteResponseBuilder.getEncoding(desc, rawDataOutput);
						String schema = ExecuteResponseBuilder.getSchema(desc, rawDataOutput);
						String responseMimeType = getMimeType(rawDataOutput);
						generateComplexDataOutput(id, false, true, schema, responseMimeType, encoding, null);
					}

					else if (desc.isSetLiteralOutput()) {
						String mimeType = null;
						String schema = null;
						String encoding = null;
						DomainMetadataType dataType = desc.getLiteralOutput().getDataType();
						String reference = dataType != null ? dataType.getReference() : null;
						generateLiteralDataOutput(id, doc, true, reference, schema, mimeType, encoding, desc.getTitle());
					}
					else if (desc.isSetBoundingBoxOutput()) {
						generateBBOXOutput(id, doc, true, desc.getTitle());
					}
					return;
				}
				// Get the outputdefinitions from the clients request
				// For each request of output
				for(int i = 0; i<request.getExecute().getResponseForm().getResponseDocument().getOutputArray().length; i++) {
					OutputDefinitionType definition = request.getExecute().getResponseForm().getResponseDocument().getOutputArray(i);
					DocumentOutputDefinitionType documentDef = request.getExecute().getResponseForm().getResponseDocument().getOutputArray(i);
					String responseID = definition.getIdentifier().getStringValue();
					OutputDescriptionType desc = XMLBeansHelper.findOutputByID(responseID, outputDescs);
					if(desc==null){
						throw new ExceptionReport("Could not find the output id " + responseID, ExceptionReport.INVALID_PARAMETER_VALUE);
					}
					if(desc.isSetComplexOutput()) {
						String mimeType = getMimeType(definition);
						String schema = ExecuteResponseBuilder.getSchema(desc, definition);
						String encoding = ExecuteResponseBuilder.getEncoding(desc, definition);
						generateComplexDataOutput(responseID, documentDef.getAsReference(), false, schema, mimeType, encoding, desc.getTitle());
					}
					else if (desc.isSetLiteralOutput()) {
						String mimeType = null;
						String schema = null;
						String encoding = null;
						DomainMetadataType dataType = desc.getLiteralOutput().getDataType();
						String reference = dataType != null ? dataType.getReference() : null;
						generateLiteralDataOutput(responseID, doc, false, reference, schema, mimeType, encoding, desc.getTitle());
					}
					else if (desc.isSetBoundingBoxOutput()) {
						generateBBOXOutput(responseID, doc, false, desc.getTitle());
					}
					else{
						throw new ExceptionReport("Requested type not supported: BBOX", ExceptionReport.INVALID_PARAMETER_VALUE);
					}
				}
			}
			else {
				logger.info("OutputDefinitions are not stated explicitly in request");

				// THIS IS A WORKAROUND AND ACTUALLY NOT COMPLIANT TO THE SPEC.
				// NOTE(review): this local shadows the 'description' field of
				// the same name — intentional re-fetch, but easy to misread.
				ProcessDescriptionType description ;
				try{
					description = AlgorithmManager.getInstance().getProcessDescription(request.getExecute().getIdentifier().getStringValue());
				}catch(Exception e){
					logger.error("Error while accessing the process description for "+ request.getExecute().getIdentifier().getStringValue(),e);
					throw new RuntimeException("Error while accessing the process description for "+ request.getExecute().getIdentifier().getStringValue(),e);
				}

				// Emit every declared complex/literal output with its defaults.
				OutputDescriptionType [] d = description.getProcessOutputs().getOutputArray();
				for (int i = 0; i < d.length; i++)
				{
					if(d[i].isSetComplexOutput()) {
						String schema = d[i].getComplexOutput().getDefault().getFormat().getSchema();
						String encoding = d[i].getComplexOutput().getDefault().getFormat().getEncoding();
						String mimeType = d[i].getComplexOutput().getDefault().getFormat().getMimeType();
						generateComplexDataOutput(d[i].getIdentifier().getStringValue(), false, false, schema, mimeType, encoding, d[i].getTitle());
					}
					else if(d[i].isSetLiteralOutput()) {
						generateLiteralDataOutput(d[i].getIdentifier().getStringValue(), doc, false, d[i].getLiteralOutput().getDataType().getReference(), null, null, null, d[i].getTitle());
					}
				}
			}
		} else if(request.isStoreResponse()) {
			// Not finished yet: point the client at the retrieval servlet.
			responseElem.setStatusLocation(webStatus+"?id="+request.getUniqueId());
		}
	}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Returns the schema according to the given output description and type.
|
||||
*/
|
||||
private static String getSchema(OutputDescriptionType desc, OutputDefinitionType def) {
|
||||
String schema = null;
|
||||
if(def != null) {
|
||||
schema = def.getSchema();
|
||||
}
|
||||
|
||||
return schema;
|
||||
}
|
||||
|
||||
private static String getEncoding(OutputDescriptionType desc, OutputDefinitionType def) {
|
||||
String encoding = null;
|
||||
if(def != null) {
|
||||
encoding = def.getEncoding();
|
||||
}
|
||||
return encoding;
|
||||
}
|
||||
|
||||
public String getMimeType() {
|
||||
return getMimeType(null);
|
||||
}
|
||||
|
||||
	/**
	 * Determines the mime type to use for the response output. The output id
	 * is taken from the given definition, or else from the request's raw-data
	 * output / first response-document output; the matching output
	 * description then supplies type-specific defaults (text/plain for
	 * literals, text/xml for bounding boxes, the declared default format for
	 * complex outputs) when the request states no explicit mime type.
	 *
	 * @param def the client's output definition, may be null
	 * @return the resolved mime type; may be the empty string when nothing
	 *         could be resolved
	 */
	public String getMimeType(OutputDefinitionType def) {

		String mimeType = "";
		OutputDescriptionType[] outputDescs = description.getProcessOutputs()
				.getOutputArray();

		boolean isResponseForm = request.getExecute().isSetResponseForm();

		String inputID = "";

		// Resolve which output identifier we are talking about.
		if(def != null){
			inputID = def.getIdentifier().getStringValue();
		}else if(isResponseForm){

			if (request.getExecute().getResponseForm().isSetRawDataOutput()) {
				inputID = request.getExecute().getResponseForm().getRawDataOutput()
						.getIdentifier().getStringValue();
			} else if (request.getExecute().getResponseForm()
					.isSetResponseDocument()) {
				inputID = request.getExecute().getResponseForm()
						.getResponseDocument().getOutputArray(0).getIdentifier()
						.getStringValue();
			}
		}

		// Find the matching output description (case-insensitive id match).
		OutputDescriptionType outputDes = null;

		for (OutputDescriptionType tmpOutputDes : outputDescs) {
			if (inputID.equalsIgnoreCase(tmpOutputDes.getIdentifier()
					.getStringValue())) {
				outputDes = tmpOutputDes;
				break;
			}
		}

		// NOTE(review): if no description matches inputID, outputDes stays
		// null and the dereferences below throw NullPointerException —
		// confirm whether callers always pass a valid output id.
		if (isResponseForm) {
			// Get the outputdescriptions from the algorithm
			if (request.isRawData()) {
				mimeType = request.getExecute().getResponseForm()
						.getRawDataOutput().getMimeType();
			} else {
				// mimeType = "text/xml";
				// MSS 03/02/2009 defaulting to text/xml doesn't work when the
				// data is a complex raster
				if (outputDes.isSetLiteralOutput()) {
					mimeType = "text/plain";
				} else if(outputDes.isSetBoundingBoxOutput()){
					mimeType = "text/xml";
				} else {
					if (def != null) {
						mimeType = def.getMimeType();
					} else {
						if (outputDes.isSetComplexOutput()) {
							mimeType = outputDes.getComplexOutput()
									.getDefault().getFormat().getMimeType();
							logger.warn("Using default mime type: "
									+ mimeType
									+ " for input: "
									+ inputID);
						}
					}
				}
			}
		}
		// Fallback when the request supplied no mime type at all.
		if (mimeType == null) {
			if (outputDes.isSetLiteralOutput()) {
				mimeType = "text/plain";
			} else if(outputDes.isSetBoundingBoxOutput()){
				mimeType = "text/xml";
			} else if (outputDes.isSetComplexOutput()) {
				mimeType = outputDes.getComplexOutput().getDefault()
						.getFormat().getMimeType();
				logger.warn("Using default mime type: " + mimeType
						+ " for input: "
						+ inputID);
			}
		}

		return mimeType;
	}
|
||||
|
||||
private void generateComplexDataOutput(String responseID, boolean asReference, boolean rawData, String schema, String mimeType, String encoding, LanguageStringType title) throws ExceptionReport{
|
||||
IData obj = request.getAttachedResult().get(responseID);
|
||||
if(rawData) {
|
||||
rawDataHandler = new RawData(obj, responseID, schema, encoding, mimeType, this.identifier, description);
|
||||
}
|
||||
else {
|
||||
OutputDataItem handler = new OutputDataItem(obj, responseID, schema, encoding, mimeType, title, this.identifier, description);
|
||||
if(asReference) {
|
||||
handler.updateResponseAsReference(doc, (request.getUniqueId()).toString(),mimeType);
|
||||
}
|
||||
else {
|
||||
handler.updateResponseForInlineComplexData(doc);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void generateLiteralDataOutput(String responseID, ExecuteResponseDocument res, boolean rawData, String dataTypeReference, String schema, String mimeType, String encoding, LanguageStringType title) throws ExceptionReport {
|
||||
IData obj = request.getAttachedResult().get(responseID);
|
||||
if(rawData) {
|
||||
rawDataHandler = new RawData(obj, responseID, schema, encoding, mimeType, this.identifier, description);
|
||||
}else{
|
||||
OutputDataItem handler = new OutputDataItem(obj, responseID, schema, encoding, mimeType, title, this.identifier, description);
|
||||
handler.updateResponseForLiteralData(res, dataTypeReference);
|
||||
}
|
||||
}
|
||||
|
||||
private void generateBBOXOutput(String responseID, ExecuteResponseDocument res, boolean rawData, LanguageStringType title) throws ExceptionReport {
|
||||
IBBOXData obj = (IBBOXData) request.getAttachedResult().get(responseID);
|
||||
if(rawData) {
|
||||
rawDataHandler = new RawData(obj, responseID, null, null, null, this.identifier, description);
|
||||
}else{
|
||||
OutputDataItem handler = new OutputDataItem(obj, responseID, null, null, null, title, this.identifier, description);
|
||||
handler.updateResponseForBBOXData(res, obj);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public InputStream getAsStream() throws ExceptionReport{
|
||||
if(request.isRawData() && rawDataHandler != null) {
|
||||
return rawDataHandler.getAsStream();
|
||||
}
|
||||
if(request.isStoreResponse()) {
|
||||
String id = request.getUniqueId().toString();
|
||||
|
||||
// String statusLocation = DatabaseFactory.getDatabase().generateRetrieveResultURL(id);
|
||||
String statusLocation = webStatus+"?id="+id;
|
||||
doc.getExecuteResponse().setStatusLocation(statusLocation);
|
||||
}
|
||||
try {
|
||||
return doc.newInputStream(XMLBeansHelper.getXmlOptions());
|
||||
}
|
||||
catch(Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public void setStatus(StatusType status) {
|
||||
//workaround, should be generated either at the creation of the document or when the process has been finished.
|
||||
status.setCreationTime(creationTime);
|
||||
doc.getExecuteResponse().setStatus(status);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -49,6 +49,7 @@ import org.n52.wps.server.WebProcessingService;
|
|||
import org.n52.wps.server.handler.RequestExecutor;
|
||||
import org.n52.wps.server.request.CapabilitiesRequest;
|
||||
import org.n52.wps.server.request.DescribeProcessRequest;
|
||||
import org.n52.wps.server.request.ExecuteRequest;
|
||||
import org.n52.wps.server.request.Request;
|
||||
import org.n52.wps.server.request.RetrieveResultRequest;
|
||||
import org.n52.wps.server.response.Response;
|
||||
|
|
|
@ -1,78 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<WPSConfiguration xmlns="http://n52.org/wps">
|
||||
<Datahandlers>
|
||||
<ParserList>
|
||||
<Parser name="WCPSQueryParser" className="org.n52.wps.io.datahandler.parser.WCPSQueryParser" active="true">
|
||||
<Format mimetype="text/plain" schema="http://schemas.opengis.net/wcps/1.0/wcpsAll.xsd"/>
|
||||
</Parser>
|
||||
<Parser name="WKTParser" className="org.n52.wps.io.datahandler.parser.WKTParser" active="true">
|
||||
<Format mimetype="application/wkt"/>
|
||||
</Parser>
|
||||
<Parser name="GenericXMLDataParser" className="org.n52.wps.io.datahandler.parser.GenericXMLDataParser" active="true">
|
||||
<Format mimetype="text/xml; subtype=gml/2.1.2" schema="http://schemas.opengis.net/gml/2.1.2/feature.xsd"/>
|
||||
<Format mimetype="text/xml"/>
|
||||
</Parser>
|
||||
<Parser name="GenericFileParser" className="org.n52.wps.io.datahandler.parser.GenericFileParser" active="true">
|
||||
<Format mimetype="text/csv"/>
|
||||
<Format mimetype="text/plain"/>
|
||||
</Parser>
|
||||
<Parser name="GisLinkParser" className="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.GisLinkParser" active="true">
|
||||
<Format mimetype="application/geotiff"/>
|
||||
<Format mimetype="application/wcs"/>
|
||||
<Format mimetype="application/asc"/>
|
||||
<Format mimetype="text/plain"/>
|
||||
<Format mimetype="application/wfs"/>
|
||||
<Format mimetype="application/opendap"/>
|
||||
</Parser>
|
||||
</ParserList>
|
||||
<GeneratorList>
|
||||
<Generator name="WKTGenerator" className="org.n52.wps.io.datahandler.generator.WKTGenerator" active="true">
|
||||
<Format mimetype="application/wkt"/>
|
||||
</Generator>
|
||||
<Generator name="GenericXMLDataGenerator" className="org.n52.wps.io.datahandler.generator.GenericXMLDataGenerator" active="true">
|
||||
<Format mimetype="text/xml; subtype=gml/2.1.2" schema="http://schemas.opengis.net/gml/2.1.2/feature.xsd"/>
|
||||
</Generator>
|
||||
<Generator name="GenericFileGenerator" className="org.n52.wps.io.datahandler.generator.GenericFileGenerator" active="true">
|
||||
<Format mimetype="text/plain"/>
|
||||
</Generator>
|
||||
<Generator name="PngFileGenerator" className="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.PngFileGenerator" active="true">
|
||||
<Format mimetype="image/png"/>
|
||||
</Generator>
|
||||
<Generator name="GifFileGenerator" className="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.GifFileGenerator" active="true">
|
||||
<Format mimetype="image/gif"/>
|
||||
</Generator>
|
||||
<Generator name="D4ScienceFileGenerator" className="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.D4ScienceFileGenerator" active="true">
|
||||
<Format mimetype="application/d4science"/>
|
||||
</Generator>
|
||||
<Generator name="CsvFileGenerator" className="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.CsvFileGenerator" active="true">
|
||||
<Format mimetype="text/csv"/>
|
||||
</Generator>
|
||||
<Generator name="GisLinkGenerator" className="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.GisLinkGenerator" active="true">
|
||||
<Format mimetype="application/wms"/>
|
||||
</Generator>
|
||||
</GeneratorList>
|
||||
</Datahandlers>
|
||||
<AlgorithmRepositoryList>
|
||||
<Repository name="LocalAlgorithmRepository" className="org.n52.wps.server.LocalAlgorithmRepository" active="true">
|
||||
<Property name="Algorithm" active="true">org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.BIONYM_LOCAL</Property>
|
||||
<Property name="Algorithm" active="true">org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.BIOCLIMATE_HCAF</Property>
|
||||
<Property name="Algorithm" active="true">org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.PRESENCE_CELLS_GENERATION</Property>
|
||||
<Property name="Algorithm" active="true">org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.HCAF_INTERPOLATION</Property>
|
||||
<Property name="Algorithm" active="true">org.n52.wps.demo.GPDemoXML</Property>
|
||||
<Property name="Algorithm" active="true">org.n52.wps.demo.TestIO</Property>
|
||||
<Property name="Algorithm" active="true">org.n52.wps.server.algorithm.JTSConvexHullAlgorithm</Property>
|
||||
<Property name="Algorithm" active="true">org.n52.wps.server.algorithm.test.DummyTestClass</Property>
|
||||
<Property name="Algorithm" active="true">org.n52.wps.server.algorithm.test.EchoProcess</Property>
|
||||
<Property name="Algorithm" active="true">org.n52.wps.server.algorithm.test.LongRunningDummyTestClass</Property>
|
||||
<Property name="Algorithm" active="true">org.n52.wps.server.algorithm.test.MultiReferenceBinaryInputAlgorithm</Property>
|
||||
<Property name="Algorithm" active="true">org.n52.wps.server.algorithm.test.MultiReferenceInputAlgorithm</Property>
|
||||
<Property name="Algorithm" active="true">org.n52.wps.server.algorithm.test.MultipleComplexInAndOutputsDummyTestClass</Property>
|
||||
</Repository>
|
||||
<Repository name="UploadedAlgorithmRepository" className="org.n52.wps.server.UploadedAlgorithmRepository" active="false"/>
|
||||
<Repository name="ServiceLoaderAlgorithmRepository" className="org.n52.wps.server.ServiceLoaderAlgorithmRepository" active="true"/>
|
||||
</AlgorithmRepositoryList>
|
||||
<RemoteRepositoryList/>
|
||||
<Server protocol="http" hostname="localhost" hostport="8080" includeDataInputsInResponse="false" computationTimeoutMilliSeconds="5" cacheCapabilites="false" webappPath="wps" repoReloadInterval="0.0" minPoolSize="10" maxPoolSize="20" keepAliveSeconds="1000" maxQueuedTasks="100">
|
||||
<Database/>
|
||||
</Server>
|
||||
</WPSConfiguration>
|
|
@ -0,0 +1,83 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.test;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URI;
|
||||
import java.net.URL;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.apache.commons.httpclient.HttpClient;
|
||||
import org.apache.commons.httpclient.methods.GetMethod;
|
||||
import org.junit.Test;
|
||||
|
||||
public class AlgorithmTest {
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
public void executeAlgorithmsFromFile() throws Exception{
|
||||
String protocol = "http";
|
||||
String hostname = "dataminer2-d-d4s.d4science.org";
|
||||
String token = "52b59669-ccde-46d2-a4da-108b9e941f7c-98187548";
|
||||
|
||||
Iterator<String> uris = getUrisIterator();
|
||||
|
||||
HttpClient client = new HttpClient();
|
||||
String algorithmName="NOTHING";
|
||||
while (uris.hasNext()){
|
||||
String nextLine = uris.next();
|
||||
|
||||
if (nextLine.startsWith("#"))
|
||||
algorithmName = nextLine;
|
||||
else{
|
||||
String callUrl = nextLine.replace("{PROTOCOL}", protocol).replace("{HOST}", hostname).replace("{TOKEN}", token);
|
||||
try{
|
||||
URL url = new URL(callUrl);
|
||||
URI uri = new URI(url.getProtocol(), url.getUserInfo(), url.getHost(), url.getPort(), url.getPath(), url.getQuery(), url.getRef());
|
||||
GetMethod get = new GetMethod(uri.toString());
|
||||
client.executeMethod(get);
|
||||
if(get.getStatusCode()!=200)
|
||||
System.out.println("algorithm "+algorithmName+" returned status "+get.getStatusCode()+" with url "+uri);
|
||||
} catch(Exception e){
|
||||
System.out.println("invalid url");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Iterator<String> getUrisIterator() throws Exception{
|
||||
|
||||
return new Iterator<String>(){
|
||||
|
||||
private String line= null;
|
||||
private BufferedReader buffer=new BufferedReader(new InputStreamReader(this.getClass().getResourceAsStream("/AlgorithmTestURIs.txt")));
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
try {
|
||||
line = buffer.readLine();
|
||||
} catch (IOException e) {
|
||||
System.out.println("error reading buffer");
|
||||
}
|
||||
if(line==null){
|
||||
try {
|
||||
if (buffer!=null)
|
||||
buffer.close();
|
||||
} catch (IOException e) {
|
||||
System.out.println("error closing buffer");
|
||||
}
|
||||
return false;
|
||||
} else return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String next() {
|
||||
return line;
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -4,13 +4,13 @@ import java.io.File;
|
|||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
import org.gcube.dataanalysis.executor.tests.CustomRegressor;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.DatabaseInfo;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.InfrastructureDialoguer;
|
||||
|
||||
public class CreateTestTableFromFile {
|
||||
public static void main(String[] args) throws Exception{
|
||||
CustomRegressor regressor = new CustomRegressor();
|
||||
Regressor regressor = new Regressor();
|
||||
AlgorithmConfiguration config = regressor.getConfig();
|
||||
|
||||
String tableName = "hcaf_2050";
|
||||
|
|
|
@ -4,7 +4,7 @@ import java.io.File;
|
|||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
import org.gcube.dataanalysis.executor.tests.CustomRegressor;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.DatabaseInfo;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.InfrastructureDialoguer;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.InputsManager;
|
||||
|
@ -12,7 +12,7 @@ import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.InputsM
|
|||
public class DumpTestTables {
|
||||
|
||||
public static void main(String[] args) throws Exception{
|
||||
CustomRegressor regressor = new CustomRegressor();
|
||||
Regressor regressor = new Regressor();
|
||||
AlgorithmConfiguration config = regressor.getConfig();
|
||||
|
||||
String tableName = "spread_test";
|
||||
|
|
|
@ -0,0 +1,200 @@
|
|||
#GET_CAPABILITIES
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=GetCapabilities&Service=WPS&gcube-token={TOKEN}
|
||||
#ESRI_GRID_EXTRACTION
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION&DataInputs=Layer=483a4a32-729e-422b-b5e4-49f27ba93ec2;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
||||
#DBSCAN
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;epsilon=10;min_points=1;OccurrencePointsTable={PROTOCOL}://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
#KMEANS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;k=3;max_runs=100;min_points=1;max_optimization_steps=10;OccurrencePointsTable={PROTOCOL}://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
#LOF
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF&DataInputs=FeaturesColumnNames=depthmean|sstmnmax|salinitymean;PointsClusterLabel=OccClustersTest;minimal_points_lower_bound=2;PointsTable={PROTOCOL}://goo.gl/VDzpch;minimal_points_upper_bound=10;distance_function=euclidian distance;lof_threshold=2;
|
||||
#XMEANS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;min_points=1;maxIterations=100;minClusters=1;maxClusters=3;OccurrencePointsTable={PROTOCOL}://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
#BIONYM
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;RawTaxaNamesTable={PROTOCOL}://goo.gl/N9e3pC;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;RawNamesColumn=species;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;OutputTableLabel=bionymwps;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;
|
||||
#BIONYM_LOCAL
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;SpeciesAuthorName=Gadus morhua
|
||||
#ABSENCE CELLS FROM AQUAMAPS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Aquamaps_HSPEC={PROTOCOL}://goo.gl/24XrmE;Take_Randomly=true;Species_Code=Fis-30189;
|
||||
#HCAF_FILTER
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER&DataInputs=B_Box_Left_Lower_Lat=-17;B_Box_Right_Upper_Long=147;B_Box_Right_Upper_Lat=25;B_Box_Left_Lower_Long=89;Table_Label=wps_hcaf_filter;
|
||||
#MAX_ENT_NICHE_MODELLING
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING&DataInputs=LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;Z=0;Layers=483a4a32-729e-422b-b5e4-49f27ba93ec2;TimeIndex=0;MaxIterations=100;SpeciesName=Latimeria chalumnae;DefaultPrevalence=0.5;YResolution=0.5;OccurrencesTable={PROTOCOL}://goo.gl/5cnKKp;XResolution=0.5;OutputTableLabel=wps_maxent;
|
||||
#OCCURRENCE_ENRICHMENT
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT&DataInputs=OptionalFilter= ;OutputTableName=wps_enriched;FeaturesNames=temp;OccurrenceTable={PROTOCOL}://goo.gl/ZfFcfE;LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;ScientificNameColumn=scientificname;Layers=483a4a32-729e-422b-b5e4-49f27ba93ec2;TimeColumn=eventdate;Resolution=0.5;
|
||||
#PRESENCE_CELLS_GENERATION
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Species_Code=Fis-30189;
|
||||
#FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR&DataInputs=Resolution=5;Latitude_Column=decimallatitude;InputTable={PROTOCOL}://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_fao_area_column;
|
||||
#FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT&DataInputs=Resolution=5;InputTable={PROTOCOL}://goo.gl/yJTIBZ;Longitude_Column=centerlong;Quadrant_Column=quadrant;OutputTableName=wps_fao_quadrant;Latitude_Column=centerlat;
|
||||
#CSQUARE_COLUMN_CREATOR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR&DataInputs=CSquare_Resolution=0.1;Latitude_Column=decimallatitude;InputTable={PROTOCOL}://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_csquare_column;
|
||||
#GENERIC_CHARTS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS&DataInputs=Quantities=fvalue;InputTable={PROTOCOL}://goo.gl/lWTvcw;TopElementsNumber=10;Attributes=x|y
|
||||
#GEO_CHART
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART&DataInputs=Latitude=y;Quantities=x;Longitude=x;InputTable={PROTOCOL}://goo.gl/lWTvcw
|
||||
#TIME_GEO_CHART
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART&DataInputs=Time=time;Latitude=x;Longitude=y;Quantities=fvalue;InputTable={PROTOCOL}://goo.gl/lWTvcw;
|
||||
#TIME_SERIES_CHARTS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS&DataInputs=Attributes=x|y|z;Quantities=fvalue;InputTable={PROTOCOL}://goo.gl/lWTvcw;Time=time
|
||||
#OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATION_MEOW_AREA_PER_YEAR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATION_MEOW_AREA_PER_YEAR&DataInputs=Selected species=Gadus morhua;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
#OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATION_LME_AREA_PER_YEAR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATION_LME_AREA_PER_YEAR&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
#OBIS_MOST_OBSERVED_SPECIES
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MOST_OBSERVED_SPECIES
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MOST_OBSERVED_SPECIES&DataInputs=Species_number=10;End_year=2015;Start_year=2000;
|
||||
#OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TAXONOMY_OBSERVATIONS_TREND_PER_YEAR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TAXONOMY_OBSERVATIONS_TREND_PER_YEAR&DataInputs=Level=GENUS;Selected taxonomy=Gadus|Merluccius;End_year=2015;Start_year=2000;
|
||||
#OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATIONS_TREND_PER_YEAR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATIONS_TREND_PER_YEAR&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;
|
||||
#OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATIONS_PER_AREA
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATIONS_PER_AREA&DataInputs=Species=Gadus morhua;Area=LME;End_year=2015;Start_year=2000;
|
||||
#OBIS_MOST_OBSERVED_TAXA
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MOST_OBSERVED_TAXA
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MOST_OBSERVED_TAXA&DataInputs=Level=GENUS;Taxa_number=10;End_year=2015;Start_year=2000;
|
||||
#TIME_SERIES_ANALYSIS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS&DataInputs=FFT_Window_Samples=12;SSA_Points_to_Forecast=3;AggregationFunction=SUM;TimeSeriesTable={PROTOCOL}://goo.gl/lWTvcw;Sensitivity=LOW;SSA_Window_in_Samples=12;SSA_EigenvaluesThreshold=0.7;ValueColum=fvalue
|
||||
#MAPS_COMPARISON
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON&DataInputs=TimeIndex_1=0;ValuesComparisonThreshold=0.1;TimeIndex_2=0;Z=0;KThreshold=0.5;Layer_1=483a4a32-729e-422b-b5e4-49f27ba93ec2;Layer_2=483a4a32-729e-422b-b5e4-49f27ba93ec2;
|
||||
#QUALITY_ANALYSIS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS&DataInputs=NegativeCasesTableKeyColumn=csquarecode;DistributionTableProbabilityColumn=probability;PositiveCasesTableKeyColumn=csquarecode;PositiveThreshold=0.8;PositiveCasesTable={PROTOCOL}://goo.gl/8zWU7u;DistributionTableKeyColumn=csquarecode;DistributionTable={PROTOCOL}://goo.gl/cXbg2n;NegativeThreshold=0.3;NegativeCasesTable={PROTOCOL}://goo.gl/8zWU7u;
|
||||
#DISCREPANCY_ANALYSIS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS&DataInputs=ComparisonThreshold=0.1;SecondTable={PROTOCOL}://goo.gl/cXbg2n;FirstTable={PROTOCOL}://goo.gl/BBk8iB;KThreshold=0.5;MaxSamples=10000;FirstTableProbabilityColumn=probability;SecondTableProbabilityColumn=probability;FirstTableCsquareColumn=csquarecode;SecondTableCsquareColumn=csquarecode
|
||||
#XYEXTRACTOR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR&DataInputs=OutputTableLabel=wps_xy_extractor;Layer=483a4a32-729e-422b-b5e4-49f27ba93ec2;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
||||
#TIMEEXTRACTION
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION&DataInputs=SamplingFreq=-1;X=28;Y=38;Z=0;Resolution=0.5;Layer=66a8d44f-6be7-4d21-a7f1-fea7485bd319;OutputTableLabel=wps_time_extr
|
||||
#ZEXTRACTION
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION&DataInputs=OutputTableLabel=wps_z_extractor;Layer=483a4a32-729e-422b-b5e4-49f27ba93ec2;Resolution=100;Y=38;TimeIndex=0;X=28
|
||||
#XYEXTRACTOR_TABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE&DataInputs=TimeIndex=0;Z=0;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;geoReferencedTableName={PROTOCOL}://goo.gl/KjWYQG;valueColumn=oceanarea;XResolution=0.5;YResolution=0.5;BBox_UpperRightLong=50;BBox_LowerLeftLat=-60;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;OutputTableLabel=wps_xy_extr_table;
|
||||
#TIMEEXTRACTION_TABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE&DataInputs=Z=0;Resolution=0.5;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;Y=3.75;X=102.25;geoReferencedTableName={PROTOCOL}://goo.gl/VDzpch;valueColumn=centerlong;SamplingFreq=-1;OutputTableLabel=wps_time_extr_table;
|
||||
#ZEXTRACTION_TABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE&DataInputs=TimeIndex=0;Resolution=1;filter= ;zColumn=centerlong;xColumn=centerlong;yColumn=centerlat;Y=0.25;X=0.25;geoReferencedTableName={PROTOCOL}://goo.gl/VDzpch;valueColumn=oceanarea;OutputTableLabel=wps_z_table;
|
||||
#HRS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS&DataInputs=PositiveCasesTable={PROTOCOL}://goo.gl/VDzpch;NegativeCasesTable={PROTOCOL}://goo.gl/VDzpch;OptionalCondition= ;ProjectingAreaTable={PROTOCOL}://goo.gl/VDzpch;FeaturesColumns=depthmin|depthmax;
|
||||
#SGVM_INTERPOLATION
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION&DataInputs=headingAdjustment=0;maxspeedThr=6;minspeedThr=2;fm=0.5;margin=10;distscale=20;res=100;sigline=0.2;interval=120;equalDist=true;InputFile={PROTOCOL}://goo.gl/i16kPw;npoints=10;method=cHs;
|
||||
#BIOCLIMATE_HCAF
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HCAF
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HCAF&DataInputs=HCAF_Table_List={PROTOCOL}://goo.gl/LTqufC|{PROTOCOL}://goo.gl/LTqufC;HCAF_Table_Names=h1|h2
|
||||
#BIOCLIMATE_HSPEN
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HSPEN
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HSPEN&DataInputs=HSPEN_Table_List={PROTOCOL}://data-d.d4science.org/dDRpendoOEpnUE5nRzM4WHQ3RWVlVUorb3lwa2wzNWJHbWJQNStIS0N6Yz0|{PROTOCOL}://data-d.d4science.org/dDRpendoOEpnUE5nRzM4WHQ3RWVlVUorb3lwa2wzNWJHbWJQNStIS0N6Yz0;HSPEN_Table_Names=h1|h2;
|
||||
#CSQUARES_TO_COORDINATES
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARES_TO_COORDINATES
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARES_TO_COORDINATES&DataInputs=InputTable={PROTOCOL}://data-d.d4science.org/d1lBOG0raU9UdmRnRzM4WHQ3RWVlV1QrWWIrQlJHcHpHbWJQNStIS0N6Yz0;ColumnWithCodes=csquarecode;OutputTableName=csq_;
|
||||
#GRID_CWP_TO_COORDINATES
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GRID_CWP_TO_COORDINATES
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GRID_CWP_TO_COORDINATES&DataInputs=InputTable={PROTOCOL}://data.d4science.org/Mm5hblE0NG9KWHFMRGw0L24xTFlMZ2t2dGRKakNRVnhHbWJQNStIS0N6Yz0;ColumnWithCodes=fao_ocean_area;OutputTableName=cwp_;
|
||||
#FEED_FORWARD_ANN
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.modellers.FEED_FORWARD_ANN
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.modellers.FEED_FORWARD_ANN&DataInputs=TrainingDataSet={PROTOCOL}://data-d.d4science.org/UndsT051bEZwbEpnRzM4WHQ3RWVlVDQ0eHNITWgzRXdHbWJQNStIS0N6Yz0;TrainingColumns=a|b;TargetColumn=t;LayersNeurons=1;Reference=1;LearningThreshold=0.001;MaxIterations=1000;ModelName=neuralnet_t;
|
||||
#FEED_FORWARD_A_N_N_DISTRIBUTION
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.FEED_FORWARD_A_N_N_DISTRIBUTION
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.FEED_FORWARD_A_N_N_DISTRIBUTION&DataInputs=FeaturesTable={PROTOCOL}://data-d.d4science.org/UndsT051bEZwbEpnRzM4WHQ3RWVlVDQ0eHNITWgzRXdHbWJQNStIS0N6Yz0;FeaturesColumnNames=a|b;FinalTableLabel=Distrib_t;ModelName={PROTOCOL}://goo.gl/ggiPyX;
|
||||
#OCCURRENCES_DUPLICATES_DELETER
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCES_DUPLICATES_DELETER
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCES_DUPLICATES_DELETER&DataInputs=final_Table_Name=DeletedOcc_;OccurrencePointsTableName={PROTOCOL}://data-d.d4science.org/KzI1TmN5TCtJT2hnRzM4WHQ3RWVlZlZLdCttTThpUnRHbWJQNStIS0N6Yz0;longitudeColumn=decimalLongitude;latitudeColumn=decimalLatitude;recordedByColumn=recordedBy;scientificNameColumn=scientificName;eventDateColumn=eventDate;lastModificationColumn=modified;spatialTolerance=0.5;confidence=80;
|
||||
#ESTIMATE_FISHING_ACTIVITY
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESTIMATE_FISHING_ACTIVITY
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESTIMATE_FISHING_ACTIVITY&DataInputs=InputTable={PROTOCOL}://data-d.d4science.org/Z0V3UGRHeHJoVHYzcEdBekVHU3E4S3BiWEt2YjZVZVlHbWJQNStIS0N6Yz0;VesselsIDColumn=vesselid;VesselsSpeedsColumn=speed;VesselsTimestampsColumn=datetime;VesselsLatitudesColumn=y;VesselsLongitudesColumn=x;OutputTableName=fish_;
|
||||
#ESTIMATE_MONTHLY_FISHING_EFFORT
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESTIMATE_MONTHLY_FISHING_EFFORT
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESTIMATE_MONTHLY_FISHING_EFFORT&DataInputs=InputTable={PROTOCOL}://data.d4science.org/N0QyZ3JRMzJ6ZmxaZWYvY3g2bDZNcG8wV2RZTjhTcEtHbWJQNStIS0N6Yz0;VesselsLatitudesColumn=y;VesselsLongitudesColumn=x;VesselsTimestampsColumn=datetime;VesselsActivityHoursColumn=activity_hours;VesselsActivityClassificationColumn=activity_class_speed;OutputTableName=monthfish_;
|
||||
#Submit query
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY&DataInputs=DatabaseName=fishbase;Query=select * from food limit 100;Apply Smart Correction=false;Language=POSTGRES;ResourceName=FishBase;Read-Only Query=true;
|
||||
#LISTDBNAMES
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTDBNAMES
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTDBNAMES&DataInputs=MaxNumber=-1;
|
||||
#LISTDBINFO
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTDBINFO
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTDBINFO&DataInputs=ResourceName=StatisticalManagerDataBase;
|
||||
#LISTDBSCHEMA
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTDBSCHEMA
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTDBSCHEMA&DataInputs=ResourceName=StatisticalManagerDataBase;DatabaseName=dataSpace;
|
||||
#LISTTABLES
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTTABLES
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTTABLES&DataInputs=ResourceName=StatisticalManagerDataBase;DatabaseName=dataSpace;SchemaName=public;
|
||||
#SAMPLEONTABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SAMPLEONTABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SAMPLEONTABLE&DataInputs=ResourceName=StatisticalManagerDataBase;DatabaseName=dataSpace;SchemaName=public;TableName=smentry;
|
||||
#SMARTSAMPLEONTABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SMARTSAMPLEONTABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SMARTSAMPLEONTABLE&DataInputs=ResourceName=StatisticalManagerDataBase;DatabaseName=dataSpace;SchemaName=public;TableName=smentry;
|
||||
#SUBMITQUERY
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY&DataInputs=ResourceName=StatisticalManagerDataBase;DatabaseName=dataSpace;Read-Only Query=true;Apply Smart Correction=true;Language=NONE;Query=select * from smentry limit 2;
|
||||
#Raster data publisher
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER&DataInputs=PublicationLevel=PUBLIC;DatasetAbstract=Abstract;DatasetTitle=Generic Raster Layer Test3;RasterFile={PROTOCOL}://data-d.d4science.org/QnFMOUdZNGhpcWVxTlNZNDVsZ01FTGN6S3lTR20wbE9HbWJQNStIS0N6Yz0;InnerLayerName=analyzed_field;FileNameOnInfra=raster-1465493226242.nc;Topics=analyzed_field;SpatialResolution=-1;
|
||||
#Web app publisher
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.WEB_APP_PUBLISHER
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.WEB_APP_PUBLISHER&DataInputs=ZipFile={PROTOCOL}://goo.gl/dYQ089;
|
||||
#ECOPATH_WITH_ECOSIM
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ECOPATH_WITH_ECOSIM
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ECOPATH_WITH_ECOSIM&DataInputs=Model File={PROTOCOL}://data-d.d4science.org/emRmUEFtRGVZMnozcEdBekVHU3E4TlVyNmFHOU8yNDFHbWJQNStIS0N6Yz0;Config File={PROTOCOL}://data-d.d4science.org/emRmUEFtRGVZMnozcEdBekVHU3E4Skt5dkh2OXJObHFHbWJQNStIS0N6Yz0;
|
||||
#OCCURRENCES_MERGER
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.OCCURRENCES_MERGER
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.OCCURRENCES_MERGER&DataInputs=final_Table_Name=MergedOcc;leftTableName={PROTOCOL}://data-d.d4science.org/KzI1TmN5TCtJT2hnRzM4WHQ3RWVlZlZLdCttTThpUnRHbWJQNStIS0N6Yz0;rightTableName={PROTOCOL}://data-d.d4science.org/KzI1TmN5TCtJT2hnRzM4WHQ3RWVlZlZLdCttTThpUnRHbWJQNStIS0N6Yz0;longitudeColumn=decimalLongitude;latitudeColumn=decimalLatitude;recordedByColumn=recordedBy;scientificNameColumn=scientificName;eventDateColumn=eventDate;lastModificationColumn=modified;spatialTolerance=0.5;confidence=80;
|
||||
#AQUAMAPS_SUITABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.AQUAMAPS_SUITABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.AQUAMAPS_SUITABLE&DataInputs=EnvelopeTable={PROTOCOL}://data.d4science.org/OFNMKzZLdGZrT3RmcElhcUlmQUpWbVpvS2h0UTEvWFRHbWJQNStIS0N6Yz0;CsquarecodesTable={PROTOCOL}://data.d4science.org/WUozaVh4aEN5ZTFmcElhcUlmQUpWaUd5ZlkwQ1RKT25HbWJQNStIS0N6Yz0;DistributionTableLabel=hspec;OccurrencePointsTable={PROTOCOL}://data.d4science.org/ZGVCYjJaWTFmaGhmcElhcUlmQUpWb2NoYVFvclBZaG5HbWJQNStIS0N6Yz0;
|
||||
#AQUAMAPS_SUITABLE 21 sp
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.AQUAMAPS_SUITABLE
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.AQUAMAPS_SUITABLE&DataInputs=EnvelopeTable={PROTOCOL}://data.d4science.org/b2hOQ1phWEVGcUxDZWZucS9UQkJmWG9JT2JDNUlTbjhHbWJQNStIS0N6Yz0;CsquarecodesTable={PROTOCOL}://data.d4science.org/d2JpZUZ4VkRvVTlmcElhcUlmQUpWdE1mOGZTZ0xhNHlHbWJQNStIS0N6Yz0;DistributionTableLabel=hspec;OccurrencePointsTable={PROTOCOL}://data.d4science.org/ZGVCYjJaWTFmaGhmcElhcUlmQUpWb2NoYVFvclBZaG5HbWJQNStIS0N6Yz0;
|
||||
#SEADATANET_INTERPOLATOR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SEADATANET_INTERPOLATOR
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SEADATANET_INTERPOLATOR&DataInputs=InputTable={PROTOCOL}://data.d4science.org/SWxzeUlNZEpDVkpmcElhcUlmQUpWamVsWll3VitvVTdHbWJQNStIS0N6Yz0;Longitude=centerlong;Latitude=centerlat;Quantity=lme;LongitudeMinValue=100;LongitudeMaxValue=115;LongitudeResolution=1;LatitudeMinValue=2;LatitudeMaxValue=4;LatitudeResolution=1;CorrelationLength=1;SignalNoise=1.08;DepthLevel=0;
|
||||
#BIONYM1024
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM&DataInputs=Matcher_1=GSAy;Matcher_4=TRIGRAM;Matcher_5=NONE;Matcher_2=FUZZYMATCH;Matcher_3=LEVENSHTEIN;Threshold_1=0.6;RawTaxaNamesTable={PROTOCOL}://data.d4science.org/TnVpOG5qeWpBZVpmcElhcUlmQUpWczFPelJuelllTTJHbWJQNStIS0N6Yz0;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;RawNamesColumn=field0;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;OutputTableLabel=bionymwps;MaxResults_4=5;Threshold_4=0.4;MaxResults_3=5;MaxResults_5=0;Threshold_5=0.2;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;
|
||||
#ABSENCE_GENERATION_FROM_OBIS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_GENERATION_FROM_OBIS
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_GENERATION_FROM_OBIS&DataInputs=list={PROTOCOL}://data.d4science.org/d2pCK1Zra0VNTUJmcElhcUlmQUpWanJ0M0lBeEJUeXBHbWJQNStIS0N6Yz0;res=1;occ_percentage=0.1;
|
||||
#SHAPEFILE_PUBLISHER
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SHAPEFILE_PUBLISHER
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SHAPEFILE_PUBLISHER&DataInputs=PublicationLevel=PUBLIC;MapTitle=TestShape;ShapeFileName=shapefile2.shp;MapAbstract=Test;ShapeFileZip={PROTOCOL}://data.d4science.org/Z252VlVHRWNabFJmcElhcUlmQUpWcU1RM1ByVEQxK1lHbWJQNStIS0N6Yz0;Topics=Test|GP;
|
||||
#POINTS_TO_MAP
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token={TOKEN}&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.POINTS_TO_MAP
|
||||
{PROTOCOL}://{HOST}/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token={TOKEN}&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.POINTS_TO_MAP&DataInputs=PublicationLevel=PUBLIC;MapName=TestPointsMap;InputTable={PROTOCOL}://data.d4science.org/SWxzeUlNZEpDVkpmcElhcUlmQUpWamVsWll3VitvVTdHbWJQNStIS0N6Yz0;xDimension=centerlong;yDimension=centerlat;Info=lme
|
Loading…
Reference in New Issue