This commit is contained in:
Gianpaolo Coro 2016-05-11 11:35:14 +00:00
parent 1e336b250e
commit e459abb522
48 changed files with 28 additions and 1248 deletions

View File

@ -1,26 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the DBSCAN clustering algorithm of the Statistical Manager.
 *
 * <p>Inputs/outputs are declared via 52°North WPS annotations; the annotated setters
 * stash parameter values into the inherited {@code inputs} map and execution is
 * delegated to {@link AbstractEcologicalEngineMapper#run()}. Setter/getter names are
 * discovered reflectively by the WPS framework — do not rename them.
 */
@Algorithm(statusSupported = true, title = "DBSCAN", abstrakt = "A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed.", identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN", version = "1.1.0")
public class DBSCAN extends AbstractEcologicalEngineMapper implements IClusterer {

	/** Input table of occurrence points (max 4000 rows), provided as a CSV link. */
	@ComplexDataInput(abstrakt = "Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title = "Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs = 1, minOccurs = 1, identifier = "OccurrencePointsTable", binding = GenericFileDataBinding.class)
	public void setOccurrencePointsTable(GenericFileData file) {
		inputs.put("OccurrencePointsTable", file);
	}

	/** Pipe-separated list of feature column names taken from the input table. */
	@LiteralDataInput(abstrakt = "Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ]", defaultValue = "", title = "column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ]", identifier = "FeaturesColumnNames", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setFeaturesColumnNames(String data) {
		inputs.put("FeaturesColumnNames", data);
	}

	/** Prefix/label for the table that will hold the resulting clustering. */
	@LiteralDataInput(abstrakt = "Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution", defaultValue = "OccCluster_", title = "table name of the resulting distribution", identifier = "OccurrencePointsClusterLabel", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setOccurrencePointsClusterLabel(String data) {
		inputs.put("OccurrencePointsClusterLabel", data);
	}

	/** DBSCAN epsilon (neighbourhood radius); stored as a string in the inputs map. */
	@LiteralDataInput(abstrakt = "Name of the parameter: epsilon. DBScan epsilon parameter", defaultValue = "10", title = "DBScan epsilon parameter", identifier = "epsilon", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setepsilon(Integer data) {
		inputs.put("epsilon", "" + data);
	}

	/** DBSCAN minimum-points parameter, used to identify outliers. */
	@LiteralDataInput(abstrakt = "Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers)", defaultValue = "1", title = "DBScan minimum points parameter (identifies outliers)", identifier = "min_points", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setmin_points(Integer data) {
		inputs.put("min_points", "" + data);
	}

	/**
	 * Streams the output cluster table back as CSV. The computation stores the
	 * result location as a URL string under the "OutputTable" key.
	 *
	 * @return the CSV output wrapped as {@link GenericFileData}, or {@code null}
	 *         on any failure (the WPS framework treats null as a missing output)
	 */
	@ComplexDataOutput(abstrakt = "Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 encoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", title = "Output cluster table [a http link to a table in UTF-8 encoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", identifier = "OutputTable", binding = CsvFileDataBinding.class)
	public GenericFileData getOutputTable() {
		URL url = null;
		try {
			url = new URL((String) outputs.get("OutputTable"));
			return new GenericFileData(url.openStream(), "text/csv");
		} catch (Exception e) {
			// NOTE(review): consider a logger instead of stack trace; null signals failure upstream
			e.printStackTrace();
			return null;
		}
	}

	/** Output whose structure is not known in advance (generic XML payload). */
	@ComplexDataOutput(abstrakt = "Output that is not predetermined", title = "NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates to the generic mapper execution. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,28 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the k-means clustering algorithm of the Statistical Manager.
 *
 * <p>Inputs/outputs are declared via 52°North WPS annotations; the annotated setters
 * stash parameter values into the inherited {@code inputs} map and execution is
 * delegated to {@link AbstractEcologicalEngineMapper#run()}. Setter/getter names are
 * discovered reflectively by the WPS framework — do not rename them.
 */
@Algorithm(statusSupported = true, title = "KMEANS", abstrakt = "A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed.", identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS", version = "1.1.0")
public class KMEANS extends AbstractEcologicalEngineMapper implements IClusterer {

	/** Input table of occurrence points (max 4000 rows), provided as a CSV link. */
	@ComplexDataInput(abstrakt = "Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title = "Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs = 1, minOccurs = 1, identifier = "OccurrencePointsTable", binding = GenericFileDataBinding.class)
	public void setOccurrencePointsTable(GenericFileData file) {
		inputs.put("OccurrencePointsTable", file);
	}

	/** Pipe-separated list of feature column names taken from the input table. */
	@LiteralDataInput(abstrakt = "Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ]", defaultValue = "", title = "column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ]", identifier = "FeaturesColumnNames", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setFeaturesColumnNames(String data) {
		inputs.put("FeaturesColumnNames", data);
	}

	/** Prefix/label for the table that will hold the resulting clustering. */
	@LiteralDataInput(abstrakt = "Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution", defaultValue = "OccCluster_", title = "table name of the resulting distribution", identifier = "OccurrencePointsClusterLabel", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setOccurrencePointsClusterLabel(String data) {
		inputs.put("OccurrencePointsClusterLabel", data);
	}

	/** Expected number of clusters (the k of k-means). */
	@LiteralDataInput(abstrakt = "Name of the parameter: k. expected Number of Clusters", defaultValue = "3", title = "expected Number of Clusters", identifier = "k", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setk(Integer data) {
		inputs.put("k", "" + data);
	}

	/** Maximum number of runs of the clustering procedure. */
	@LiteralDataInput(abstrakt = "Name of the parameter: max_runs. max runs of the clustering procedure", defaultValue = "10", title = "max runs of the clustering procedure", identifier = "max_runs", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setmax_runs(Integer data) {
		inputs.put("max_runs", "" + data);
	}

	/** Maximum number of internal optimization steps per run. */
	@LiteralDataInput(abstrakt = "Name of the parameter: max_optimization_steps. max number of internal optimization steps", defaultValue = "5", title = "max number of internal optimization steps", identifier = "max_optimization_steps", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setmax_optimization_steps(Integer data) {
		inputs.put("max_optimization_steps", "" + data);
	}

	/** Number of points below which a cluster is considered an outlier set. */
	@LiteralDataInput(abstrakt = "Name of the parameter: min_points. number of points which define an outlier set", defaultValue = "2", title = "number of points which define an outlier set", identifier = "min_points", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setmin_points(Integer data) {
		inputs.put("min_points", "" + data);
	}

	/**
	 * Streams the output cluster table back as CSV. The computation stores the
	 * result location as a URL string under the "OutputTable" key.
	 *
	 * @return the CSV output wrapped as {@link GenericFileData}, or {@code null}
	 *         on any failure (the WPS framework treats null as a missing output)
	 */
	@ComplexDataOutput(abstrakt = "Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 encoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", title = "Output cluster table [a http link to a table in UTF-8 encoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", identifier = "OutputTable", binding = CsvFileDataBinding.class)
	public GenericFileData getOutputTable() {
		URL url = null;
		try {
			url = new URL((String) outputs.get("OutputTable"));
			return new GenericFileData(url.openStream(), "text/csv");
		} catch (Exception e) {
			// NOTE(review): consider a logger instead of stack trace; null signals failure upstream
			e.printStackTrace();
			return null;
		}
	}

	/** Output whose structure is not known in advance (generic XML payload). */
	@ComplexDataOutput(abstrakt = "Output that is not predetermined", title = "NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates to the generic mapper execution. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,28 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the Local Outlier Factor (LOF) algorithm of the Statistical Manager.
 *
 * <p>Inputs/outputs are declared via 52°North WPS annotations; the annotated setters
 * stash parameter values into the inherited {@code inputs} map and execution is
 * delegated to {@link AbstractEcologicalEngineMapper#run()}. Setter/getter names are
 * discovered reflectively by the WPS framework — do not rename them.
 */
@Algorithm(statusSupported = true, title = "LOF", abstrakt = "Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed.", identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF", version = "1.1.0")
public class LOF extends AbstractEcologicalEngineMapper implements IClusterer {

	/** Input table of points/observations (max 4000 rows), provided as a CSV link. */
	@ComplexDataInput(abstrakt = "Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title = "Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs = 1, minOccurs = 1, identifier = "PointsTable", binding = GenericFileDataBinding.class)
	public void setPointsTable(GenericFileData file) {
		inputs.put("PointsTable", file);
	}

	/** Pipe-separated list of feature column names taken from the input table. */
	@LiteralDataInput(abstrakt = "Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ]", defaultValue = "", title = "column Names for the features [a sequence of names of columns from PointsTable separated by | ]", identifier = "FeaturesColumnNames", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setFeaturesColumnNames(String data) {
		inputs.put("FeaturesColumnNames", data);
	}

	/** Prefix/label for the table that will hold the resulting clustering. */
	@LiteralDataInput(abstrakt = "Name of the parameter: PointsClusterLabel. table name of the resulting distribution", defaultValue = "Cluster_", title = "table name of the resulting distribution", identifier = "PointsClusterLabel", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setPointsClusterLabel(String data) {
		inputs.put("PointsClusterLabel", data);
	}

	/** Lower bound for the locality parameter k (minimal number of nearest neighbours). */
	@LiteralDataInput(abstrakt = "Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors", defaultValue = "2", title = "locality (usually called k): minimal number of nearest neighbors", identifier = "minimal_points_lower_bound", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setminimal_points_lower_bound(Integer data) {
		inputs.put("minimal_points_lower_bound", "" + data);
	}

	/** Upper bound for the number of nearest neighbours considered in outlier evaluation. */
	@LiteralDataInput(abstrakt = "Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation", defaultValue = "10", title = "maximum number of nearest neighbors to take into account for outliers evaluation", identifier = "minimal_points_upper_bound", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setminimal_points_upper_bound(Integer data) {
		inputs.put("minimal_points_upper_bound", "" + data);
	}

	/** Distance function used in the LOF calculation (restricted to the allowed values). */
	@LiteralDataInput(abstrakt = "Name of the parameter: distance_function. the distance function to use in the calculation", allowedValues = {"euclidian distance", "squared distance", "cosine distance", "inverted cosine distance", "angle"}, defaultValue = "euclidian distance", title = "the distance function to use in the calculation", identifier = "distance_function", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setdistance_function(String data) {
		inputs.put("distance_function", data);
	}

	/** LOF score threshold above which a point is flagged as an outlier (usually 2). */
	@LiteralDataInput(abstrakt = "Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2)", defaultValue = "2", title = "the LOF score threshold over which the point is an outlier (usually 2)", identifier = "lof_threshold", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setlof_threshold(Integer data) {
		inputs.put("lof_threshold", "" + data);
	}

	/**
	 * Streams the output cluster table back as CSV. The computation stores the
	 * result location as a URL string under the "OutputTable" key.
	 *
	 * @return the CSV output wrapped as {@link GenericFileData}, or {@code null}
	 *         on any failure (the WPS framework treats null as a missing output)
	 */
	@ComplexDataOutput(abstrakt = "Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 encoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", title = "Output cluster table [a http link to a table in UTF-8 encoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", identifier = "OutputTable", binding = CsvFileDataBinding.class)
	public GenericFileData getOutputTable() {
		URL url = null;
		try {
			url = new URL((String) outputs.get("OutputTable"));
			return new GenericFileData(url.openStream(), "text/csv");
		} catch (Exception e) {
			// NOTE(review): consider a logger instead of stack trace; null signals failure upstream
			e.printStackTrace();
			return null;
		}
	}

	/** Output whose structure is not known in advance (generic XML payload). */
	@ComplexDataOutput(abstrakt = "Output that is not predetermined", title = "NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates to the generic mapper execution. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,28 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the X-Means clustering algorithm of the Statistical Manager.
 *
 * <p>Inputs/outputs are declared via 52°North WPS annotations; the annotated setters
 * stash parameter values into the inherited {@code inputs} map and execution is
 * delegated to {@link AbstractEcologicalEngineMapper#run()}. Setter/getter names are
 * discovered reflectively by the WPS framework — do not rename them.
 */
@Algorithm(statusSupported = true, title = "XMEANS", abstrakt = "A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed.", identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS", version = "1.1.0")
public class XMEANS extends AbstractEcologicalEngineMapper implements IClusterer {

	/** Input table of occurrence points (max 4000 rows), provided as a CSV link. */
	@ComplexDataInput(abstrakt = "Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title = "Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs = 1, minOccurs = 1, identifier = "OccurrencePointsTable", binding = GenericFileDataBinding.class)
	public void setOccurrencePointsTable(GenericFileData file) {
		inputs.put("OccurrencePointsTable", file);
	}

	/** Pipe-separated list of feature column names taken from the input table. */
	@LiteralDataInput(abstrakt = "Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ]", defaultValue = "", title = "column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ]", identifier = "FeaturesColumnNames", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setFeaturesColumnNames(String data) {
		inputs.put("FeaturesColumnNames", data);
	}

	/** Prefix/label for the table that will hold the resulting clustering. */
	@LiteralDataInput(abstrakt = "Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution", defaultValue = "OccCluster_", title = "table name of the resulting distribution", identifier = "OccurrencePointsClusterLabel", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setOccurrencePointsClusterLabel(String data) {
		inputs.put("OccurrencePointsClusterLabel", data);
	}

	/** Maximum number of overall iterations of the clustering learning. */
	@LiteralDataInput(abstrakt = "Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning", defaultValue = "10", title = "XMeans max number of overall iterations of the clustering learning", identifier = "maxIterations", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setmaxIterations(Integer data) {
		inputs.put("maxIterations", "" + data);
	}

	/** Minimum number of expected clusters. */
	@LiteralDataInput(abstrakt = "Name of the parameter: minClusters. minimum number of expected clusters", defaultValue = "1", title = "minimum number of expected clusters", identifier = "minClusters", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setminClusters(Integer data) {
		inputs.put("minClusters", "" + data);
	}

	/** Maximum number of clusters to produce. */
	@LiteralDataInput(abstrakt = "Name of the parameter: maxClusters. maximum number of clusters to produce", defaultValue = "50", title = "maximum number of clusters to produce", identifier = "maxClusters", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setmaxClusters(Integer data) {
		inputs.put("maxClusters", "" + data);
	}

	/** Number of points below which a cluster is considered an outlier set. */
	@LiteralDataInput(abstrakt = "Name of the parameter: min_points. number of points which define an outlier set", defaultValue = "2", title = "number of points which define an outlier set", identifier = "min_points", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setmin_points(Integer data) {
		inputs.put("min_points", "" + data);
	}

	/**
	 * Streams the output cluster table back as CSV. The computation stores the
	 * result location as a URL string under the "OutputTable" key.
	 *
	 * @return the CSV output wrapped as {@link GenericFileData}, or {@code null}
	 *         on any failure (the WPS framework treats null as a missing output)
	 */
	@ComplexDataOutput(abstrakt = "Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 encoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", title = "Output cluster table [a http link to a table in UTF-8 encoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", identifier = "OutputTable", binding = CsvFileDataBinding.class)
	public GenericFileData getOutputTable() {
		URL url = null;
		try {
			url = new URL((String) outputs.get("OutputTable"));
			return new GenericFileData(url.openStream(), "text/csv");
		} catch (Exception e) {
			// NOTE(review): consider a logger instead of stack trace; null signals failure upstream
			e.printStackTrace();
			return null;
		}
	}

	/** Output whose structure is not known in advance (generic XML payload). */
	@ComplexDataOutput(abstrakt = "Output that is not predetermined", title = "NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates to the generic mapper execution. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,30 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the discrepancy-analysis evaluator of the Statistical Manager:
 * compares two tables of real valued vectors using a geographical distance threshold
 * and a K-Statistic threshold.
 *
 * <p>Inputs/outputs are declared via 52°North WPS annotations; the annotated setters
 * stash parameter values into the inherited {@code inputs} map and execution is
 * delegated to {@link AbstractEcologicalEngineMapper#run()}. Setter/getter names are
 * discovered reflectively by the WPS framework — do not rename them.
 */
@Algorithm(statusSupported = true, title = "DISCREPANCY_ANALYSIS", abstrakt = "An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic.", identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS", version = "1.1.0")
public class DISCREPANCY_ANALYSIS extends AbstractEcologicalEngineMapper implements IEvaluator {

	/** First table to compare, provided as a link following the HSPEC template. */
	@ComplexDataInput(abstrakt = "Name of the parameter: FirstTable. First Table [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", title = "First Table [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", maxOccurs = 1, minOccurs = 1, identifier = "FirstTable", binding = GenericFileDataBinding.class)
	public void setFirstTable(GenericFileData file) {
		inputs.put("FirstTable", file);
	}

	/** Second table to compare, provided as a link following the HSPEC template. */
	@ComplexDataInput(abstrakt = "Name of the parameter: SecondTable. Second Table [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", title = "Second Table [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", maxOccurs = 1, minOccurs = 1, identifier = "SecondTable", binding = GenericFileDataBinding.class)
	public void setSecondTable(GenericFileData file) {
		inputs.put("SecondTable", file);
	}

	/** Name of the csquares column in the first table. */
	@LiteralDataInput(abstrakt = "Name of the parameter: FirstTableCsquareColumn. the csquares column name in the first table [the name of a column from FirstTable]", defaultValue = "csquarecode", title = "the csquares column name in the first table [the name of a column from FirstTable]", identifier = "FirstTableCsquareColumn", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setFirstTableCsquareColumn(String data) {
		inputs.put("FirstTableCsquareColumn", data);
	}

	/** Name of the csquares column in the second table. */
	@LiteralDataInput(abstrakt = "Name of the parameter: SecondTableCsquareColumn. the csquares column name in the second table [the name of a column from SecondTable]", defaultValue = "csquarecode", title = "the csquares column name in the second table [the name of a column from SecondTable]", identifier = "SecondTableCsquareColumn", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setSecondTableCsquareColumn(String data) {
		inputs.put("SecondTableCsquareColumn", data);
	}

	/** Name of the probability column in the first table. */
	@LiteralDataInput(abstrakt = "Name of the parameter: FirstTableProbabilityColumn. the probability column in the first table [the name of a column from FirstTable]", defaultValue = "probability", title = "the probability column in the first table [the name of a column from FirstTable]", identifier = "FirstTableProbabilityColumn", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setFirstTableProbabilityColumn(String data) {
		inputs.put("FirstTableProbabilityColumn", data);
	}

	/** Name of the probability column in the second table. */
	@LiteralDataInput(abstrakt = "Name of the parameter: SecondTableProbabilityColumn. the probability column in the second table [the name of a column from SecondTable]", defaultValue = "probability", title = "the probability column in the second table [the name of a column from SecondTable]", identifier = "SecondTableProbabilityColumn", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setSecondTableProbabilityColumn(String data) {
		inputs.put("SecondTableProbabilityColumn", data);
	}

	/** Threshold under which two compared values are considered equivalent. */
	@LiteralDataInput(abstrakt = "Name of the parameter: ComparisonThreshold. the comparison threshold", defaultValue = "0.1", title = "the comparison threshold", identifier = "ComparisonThreshold", maxOccurs = 1, minOccurs = 1, binding = LiteralDoubleBinding.class)
	public void setComparisonThreshold(Double data) {
		inputs.put("ComparisonThreshold", "" + data);
	}

	// FIX: the original abstrakt/title said "the comparison threshold" — a copy-paste
	// error from the ComparisonThreshold parameter above; this parameter caps samples.
	/** Maximum number of samples to use in the comparison. */
	@LiteralDataInput(abstrakt = "Name of the parameter: MaxSamples. the maximum number of samples to take", defaultValue = "10000", title = "the maximum number of samples to take", identifier = "MaxSamples", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setMaxSamples(Integer data) {
		inputs.put("MaxSamples", "" + data);
	}

	/** K-Statistic threshold: values above it are treated as 1 in agreement calculation. */
	@LiteralDataInput(abstrakt = "Name of the parameter: KThreshold. Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5", defaultValue = "0.5", title = "Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5", identifier = "KThreshold", maxOccurs = 1, minOccurs = 1, binding = LiteralDoubleBinding.class)
	public void setKThreshold(Double data) {
		inputs.put("KThreshold", "" + data);
	}

	/** Output whose structure is not known in advance (generic XML payload). */
	@ComplexDataOutput(abstrakt = "Output that is not predetermined", title = "NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates to the generic mapper execution. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,26 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the Habitat Representativeness Score (HRS) evaluator of the
 * Statistical Manager.
 *
 * <p>Parameter setters below are invoked reflectively by the 52°North WPS framework
 * according to their annotations; each one records its value in the inherited
 * {@code inputs} map, and the actual computation happens in the superclass's
 * {@code run()}. Method names are part of the reflective contract and must not change.
 */
@Algorithm(statusSupported = true, title = "HRS", abstrakt = "An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area.", identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS", version = "1.1.0")
public class HRS extends AbstractEcologicalEngineMapper implements IEvaluator {

	/** Table describing the projecting area (HCAF template link). */
	@ComplexDataInput(abstrakt = "Name of the parameter: ProjectingAreaTable. A Table containing projecting area information [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", title = "A Table containing projecting area information [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", maxOccurs = 1, minOccurs = 1, identifier = "ProjectingAreaTable", binding = GenericFileDataBinding.class)
	public void setProjectingAreaTable(GenericFileData file) {
		inputs.put("ProjectingAreaTable", file);
	}

	/** Optional SQL-like filter applied when selecting area rows. */
	@LiteralDataInput(abstrakt = "Name of the parameter: OptionalCondition. optional filter for taking area rows", defaultValue = "where oceanarea>0", title = "optional filter for taking area rows", identifier = "OptionalCondition", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setOptionalCondition(String data) {
		inputs.put("OptionalCondition", data);
	}

	/** Table of positive cases (HCAF template link). */
	@ComplexDataInput(abstrakt = "Name of the parameter: PositiveCasesTable. A Table containing positive cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", title = "A Table containing positive cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", maxOccurs = 1, minOccurs = 1, identifier = "PositiveCasesTable", binding = GenericFileDataBinding.class)
	public void setPositiveCasesTable(GenericFileData file) {
		inputs.put("PositiveCasesTable", file);
	}

	/** Table of negative cases (HCAF template link). */
	@ComplexDataInput(abstrakt = "Name of the parameter: NegativeCasesTable. A Table containing negative cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", title = "A Table containing negative cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", maxOccurs = 1, minOccurs = 1, identifier = "NegativeCasesTable", binding = GenericFileDataBinding.class)
	public void setNegativeCasesTable(GenericFileData file) {
		inputs.put("NegativeCasesTable", file);
	}

	/** Pipe-separated feature column names drawn from the positive cases table. */
	@LiteralDataInput(abstrakt = "Name of the parameter: FeaturesColumns. Features columns [a sequence of names of columns from PositiveCasesTable separated by | ]", defaultValue = "", title = "Features columns [a sequence of names of columns from PositiveCasesTable separated by | ]", identifier = "FeaturesColumns", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setFeaturesColumns(String data) {
		inputs.put("FeaturesColumns", data);
	}

	/** Generic XML payload whose structure is not known ahead of time. */
	@ComplexDataOutput(abstrakt = "Output that is not predetermined", title = "NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS execution hook — all work is performed by the superclass. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,28 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
@Algorithm(statusSupported=true, title="MAPS_COMPARISON", abstrakt="An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON", version = "1.1.0")
public class MAPS_COMPARISON extends AbstractEcologicalEngineMapper implements IEvaluator{
@LiteralDataInput(abstrakt="Name of the parameter: Layer_1. First Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", defaultValue="", title="First Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", identifier = "Layer_1", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayer_1(String data) {inputs.put("Layer_1",data);}
@LiteralDataInput(abstrakt="Name of the parameter: Layer_2. Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", defaultValue="", title="Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", identifier = "Layer_2", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayer_2(String data) {inputs.put("Layer_2",data);}
@LiteralDataInput(abstrakt="Name of the parameter: Z. value of Z. Default is 0, that means comparison will be at surface level", defaultValue="0", title="value of Z. Default is 0, that means comparison will be at surface level", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setZ(Integer data) {inputs.put("Z",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: ValuesComparisonThreshold. A comparison threshold for the values in the map. Null equals to 0.1", defaultValue="0.1", title="A comparison threshold for the values in the map. Null equals to 0.1", identifier = "ValuesComparisonThreshold", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setValuesComparisonThreshold(Double data) {inputs.put("ValuesComparisonThreshold",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex_1. First Layer Time Index. The default is the first", defaultValue="0", title="First Layer Time Index. The default is the first", identifier = "TimeIndex_1", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex_1(Integer data) {inputs.put("TimeIndex_1",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex_2. Second Layer Time Index. The default is the first", defaultValue="0", title="Second Layer Time Index. The default is the first", identifier = "TimeIndex_2", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex_2(Integer data) {inputs.put("TimeIndex_2",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: KThreshold. Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5", defaultValue="0.5", title="Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5", identifier = "KThreshold", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setKThreshold(Double data) {inputs.put("KThreshold",""+data);}
@ComplexDataOutput(abstrakt="Error Distribution", title="Error Distribution", identifier = "Error Distribution", binding = PngFileDataBinding.class) public GenericFileData getDistribution_of_the_Error() {URL url=null;try {url = new URL((String) outputs.get("Error Distribution")); return new GenericFileData(url.openStream(),"image/png");} catch (Exception e) {e.printStackTrace();return null;}}
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
@Execute public void run() throws Exception { super.run(); } }

View File

@ -1,30 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the QUALITY_ANALYSIS evaluator: each annotated setter stores a
 * request parameter into the inherited {@code inputs} map; the getter reads the result
 * back from the inherited {@code outputs} map after {@link #run()} has executed.
 */
@Algorithm(
		statusSupported = true,
		title = "QUALITY_ANALYSIS",
		abstrakt = "An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model",
		identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS",
		version = "1.1.0")
public class QUALITY_ANALYSIS extends AbstractEcologicalEngineMapper implements IEvaluator {

	/** Stores the positive-cases table under the "PositiveCasesTable" input key. */
	@ComplexDataInput(
			abstrakt = "Name of the parameter: PositiveCasesTable. A Table containing positive cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]",
			title = "A Table containing positive cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]",
			maxOccurs = 1,
			minOccurs = 1,
			identifier = "PositiveCasesTable",
			binding = GenericFileDataBinding.class)
	public void setPositiveCasesTable(GenericFileData tableFile) {
		inputs.put("PositiveCasesTable", tableFile);
	}

	/** Stores the negative-cases table under the "NegativeCasesTable" input key. */
	@ComplexDataInput(
			abstrakt = "Name of the parameter: NegativeCasesTable. A Table containing negative cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]",
			title = "A Table containing negative cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]",
			maxOccurs = 1,
			minOccurs = 1,
			identifier = "NegativeCasesTable",
			binding = GenericFileDataBinding.class)
	public void setNegativeCasesTable(GenericFileData tableFile) {
		inputs.put("NegativeCasesTable", tableFile);
	}

	/** Stores the key-column name of the positive-cases table. */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: PositiveCasesTableKeyColumn. Positive Cases Table Key Column [the name of a column from PositiveCasesTable]",
			defaultValue = "csquarecode",
			title = "Positive Cases Table Key Column [the name of a column from PositiveCasesTable]",
			identifier = "PositiveCasesTableKeyColumn",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralStringBinding.class)
	public void setPositiveCasesTableKeyColumn(String value) {
		inputs.put("PositiveCasesTableKeyColumn", value);
	}

	/** Stores the key-column name of the negative-cases table. */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: NegativeCasesTableKeyColumn. Negative Cases Table Key Column [the name of a column from NegativeCasesTable]",
			defaultValue = "csquarecode",
			title = "Negative Cases Table Key Column [the name of a column from NegativeCasesTable]",
			identifier = "NegativeCasesTableKeyColumn",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralStringBinding.class)
	public void setNegativeCasesTableKeyColumn(String value) {
		inputs.put("NegativeCasesTableKeyColumn", value);
	}

	/** Stores the probability-distribution table under the "DistributionTable" input key. */
	@ComplexDataInput(
			abstrakt = "Name of the parameter: DistributionTable. A probability distribution table [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]",
			title = "A probability distribution table [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]",
			maxOccurs = 1,
			minOccurs = 1,
			identifier = "DistributionTable",
			binding = GenericFileDataBinding.class)
	public void setDistributionTable(GenericFileData tableFile) {
		inputs.put("DistributionTable", tableFile);
	}

	/** Stores the key-column name of the distribution table. */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: DistributionTableKeyColumn. Distribution Table Key Column [the name of a column from DistributionTable]",
			defaultValue = "csquarecode",
			title = "Distribution Table Key Column [the name of a column from DistributionTable]",
			identifier = "DistributionTableKeyColumn",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralStringBinding.class)
	public void setDistributionTableKeyColumn(String value) {
		inputs.put("DistributionTableKeyColumn", value);
	}

	/** Stores the probability-column name of the distribution table. */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: DistributionTableProbabilityColumn. Distribution Table Probability Column [the name of a column from DistributionTable]",
			defaultValue = "probability",
			title = "Distribution Table Probability Column [the name of a column from DistributionTable]",
			identifier = "DistributionTableProbabilityColumn",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralStringBinding.class)
	public void setDistributionTableProbabilityColumn(String value) {
		inputs.put("DistributionTableProbabilityColumn", value);
	}

	/** Stores the positive acceptance threshold (kept as a string by the engine). */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: PositiveThreshold. Positive acceptance threshold",
			defaultValue = "0.8",
			title = "Positive acceptance threshold",
			identifier = "PositiveThreshold",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralStringBinding.class)
	public void setPositiveThreshold(String value) {
		inputs.put("PositiveThreshold", value);
	}

	/** Stores the negative acceptance threshold (kept as a string by the engine). */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: NegativeThreshold. Negative acceptance threshold",
			defaultValue = "0.3",
			title = "Negative acceptance threshold",
			identifier = "NegativeThreshold",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralStringBinding.class)
	public void setNegativeThreshold(String value) {
		inputs.put("NegativeThreshold", value);
	}

	/** Exposes the generic, non-predetermined XML output produced by the computation. */
	@ComplexDataOutput(
			abstrakt = "Output that is not predetermined",
			title = "NonDeterministicOutput",
			identifier = "non_deterministic_output",
			binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates the whole execution to the generic mapper. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,44 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the BiOnym taxon-name-matching workflow: each annotated setter
 * stores a request parameter into the inherited {@code inputs} map; getters read results
 * back from the inherited {@code outputs} map after {@link #run()} has executed.
 */
@Algorithm(
		statusSupported = true,
		title = "BIONYM",
		abstrakt = "An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication.",
		identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM",
		version = "1.1.0")
public class BIONYM extends AbstractEcologicalEngineMapper implements IGenerator{
	/** Stores the raw-taxa-names table under the "RawTaxaNamesTable" input key. */
	@ComplexDataInput(abstrakt="Name of the parameter: RawTaxaNamesTable. Input table containing raw taxa names that you want to match [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Input table containing raw taxa names that you want to match [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "RawTaxaNamesTable", binding = GenericFileDataBinding.class) public void setRawTaxaNamesTable(GenericFileData file) {inputs.put("RawTaxaNamesTable",file);}
	/** Stores the name of the column holding the raw taxa names. */
	@LiteralDataInput(abstrakt="Name of the parameter: RawNamesColumn. The column containing the raw taxa names with or without authoship information [the name of a column from RawTaxaNamesTable]", defaultValue="rawnames", title="The column containing the raw taxa names with or without authoship information [the name of a column from RawTaxaNamesTable]", identifier = "RawNamesColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setRawNamesColumn(String data) {inputs.put("RawNamesColumn",data);}
	/** Stores the label of the output table that will contain the matches. */
	@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. Name of the table which will contain the matches", defaultValue="bionout", title="Name of the table which will contain the matches", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
	/** Stores the reference taxa-authority dataset selection. */
	@LiteralDataInput(abstrakt="Name of the parameter: Taxa_Authority_File. The reference dataset to use", allowedValues= {"ASFIS","FISHBASE","OBIS","OBIS_ANIMALIA","OBIS_CNIDARIA","OBIS_ECHINODERMATA","OBIS_PLATYHELMINTHES","COL_FULL","COL_CHORDATA","COL_MAMMALIA","IRMNG_ACTINOPTERYGII","WORMS_ANIMALIA","WORMS_PISCES"}, defaultValue="FISHBASE", title="The reference dataset to use", identifier = "Taxa_Authority_File", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setTaxa_Authority_File(String data) {inputs.put("Taxa_Authority_File",data);}
	/** Stores the species-authority parser selection. */
	@LiteralDataInput(abstrakt="Name of the parameter: Parser_Name. The Species - Authority parser", allowedValues= {"SIMPLE","GNI","NONE"}, defaultValue="SIMPLE", title="The Species - Authority parser", identifier = "Parser_Name", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setParser_Name(String data) {inputs.put("Parser_Name",data);}
	/** Stores the flag enabling pre-parsing correction rules (stringified boolean). */
	@LiteralDataInput(abstrakt="Name of the parameter: Activate_Preparsing_Processing. Use preparsing rules to correct common errors", defaultValue="true", allowedValues= {"true","false"}, title="Use preparsing rules to correct common errors", identifier = "Activate_Preparsing_Processing", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void setActivate_Preparsing_Processing(Boolean data) {inputs.put("Activate_Preparsing_Processing",""+data);}
	/** Stores the flag enabling stemmed genus/species matching (stringified boolean). */
	@LiteralDataInput(abstrakt="Name of the parameter: Use_Stemmed_Genus_and_Species. Process using Genus and Species names without declension", defaultValue="false", allowedValues= {"true","false"}, title="Process using Genus and Species names without declension", identifier = "Use_Stemmed_Genus_and_Species", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void setUse_Stemmed_Genus_and_Species(Boolean data) {inputs.put("Use_Stemmed_Genus_and_Species",""+data);}
	/** Stores the accuracy-vs-speed trade-off selection. */
	@LiteralDataInput(abstrakt="Name of the parameter: Accuracy_vs_Speed. A trade-off between recognition speed and accuracy. Max speed corresponds to search for strings with the same length only.", allowedValues= {"MAX_ACCURACY","LOW_SPEED","MEDIUM_SPEED","HIGH_SPEED","MAX_SPEED"}, defaultValue="MAX_ACCURACY", title="A trade-off between recognition speed and accuracy. Max speed corresponds to search for strings with the same length only.", identifier = "Accuracy_vs_Speed", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setAccuracy_vs_Speed(String data) {inputs.put("Accuracy_vs_Speed",data);}
	/** Stores the first matcher selection of the workflow chain. */
	@LiteralDataInput(abstrakt="Name of the parameter: Matcher_1. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="GSAy", title="Choose a Matcher", identifier = "Matcher_1", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_1(String data) {inputs.put("Matcher_1",data);}
	/** Stores the first matcher's threshold (stringified). */
	@LiteralDataInput(abstrakt="Name of the parameter: Threshold_1. Threshold", defaultValue="0.6", title="Threshold", identifier = "Threshold_1", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_1(Double data) {inputs.put("Threshold_1",""+data);}
	/** Stores the first matcher's maximum number of candidates (stringified). */
	@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_1. The maximum number of matching candidates per each raw input species", defaultValue="10", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_1", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_1(Integer data) {inputs.put("MaxResults_1",""+data);}
	/** Stores the second matcher selection of the workflow chain. */
	@LiteralDataInput(abstrakt="Name of the parameter: Matcher_2. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="FUZZYMATCH", title="Choose a Matcher", identifier = "Matcher_2", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_2(String data) {inputs.put("Matcher_2",data);}
	/** Stores the second matcher's threshold (stringified). */
	@LiteralDataInput(abstrakt="Name of the parameter: Threshold_2. Threshold", defaultValue="0.6", title="Threshold", identifier = "Threshold_2", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_2(Double data) {inputs.put("Threshold_2",""+data);}
	/** Stores the second matcher's maximum number of candidates (stringified). */
	@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_2. The maximum number of matching candidates per each raw input species", defaultValue="10", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_2", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_2(Integer data) {inputs.put("MaxResults_2",""+data);}
	/** Stores the third matcher selection of the workflow chain. */
	@LiteralDataInput(abstrakt="Name of the parameter: Matcher_3. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="LEVENSHTEIN", title="Choose a Matcher", identifier = "Matcher_3", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_3(String data) {inputs.put("Matcher_3",data);}
	/** Stores the third matcher's threshold (stringified). */
	@LiteralDataInput(abstrakt="Name of the parameter: Threshold_3. Threshold", defaultValue="0.4", title="Threshold", identifier = "Threshold_3", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_3(Double data) {inputs.put("Threshold_3",""+data);}
	/** Stores the third matcher's maximum number of candidates (stringified). */
	@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_3. The maximum number of matching candidates per each raw input species", defaultValue="5", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_3", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_3(Integer data) {inputs.put("MaxResults_3",""+data);}
	/** Stores the fourth matcher selection of the workflow chain. */
	@LiteralDataInput(abstrakt="Name of the parameter: Matcher_4. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="TRIGRAM", title="Choose a Matcher", identifier = "Matcher_4", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_4(String data) {inputs.put("Matcher_4",data);}
	/** Stores the fourth matcher's threshold (stringified). */
	@LiteralDataInput(abstrakt="Name of the parameter: Threshold_4. Threshold", defaultValue="0.4", title="Threshold", identifier = "Threshold_4", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_4(Double data) {inputs.put("Threshold_4",""+data);}
	/** Stores the fourth matcher's maximum number of candidates (stringified). */
	@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_4. The maximum number of matching candidates per each raw input species", defaultValue="5", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_4", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_4(Integer data) {inputs.put("MaxResults_4",""+data);}
	/** Stores the optional fifth matcher selection of the workflow chain. */
	@LiteralDataInput(abstrakt="Name of the parameter: Matcher_5. Choose a Matcher (Optional)", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="NONE", title="Choose a Matcher (Optional)", identifier = "Matcher_5", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_5(String data) {inputs.put("Matcher_5",data);}
	/** Stores the fifth matcher's threshold (stringified). */
	@LiteralDataInput(abstrakt="Name of the parameter: Threshold_5. Threshold (def. 0.2)", defaultValue="0.2", title="Threshold (def. 0.2)", identifier = "Threshold_5", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_5(Double data) {inputs.put("Threshold_5",""+data);}
	/** Stores the fifth matcher's maximum number of candidates (stringified). */
	@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_5. The maximum number of matching candidates per each raw input species", defaultValue="0", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_5", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_5(Integer data) {inputs.put("MaxResults_5",""+data);}
	/**
	 * Downloads the CSV output table from the URL stored in the outputs map.
	 * Returns {@code null} (and prints the stack trace) on any failure.
	 * Fix: "UTF-8 ecoding" typo corrected to "UTF-8 encoding" in the abstrakt/title,
	 * consistent with every other table-description string in this project.
	 */
	@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. Output table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Output table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
	/** Exposes the generic, non-predetermined XML output produced by the computation. */
	@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
	/** WPS entry point: delegates the whole execution to the generic mapper. */
	@Execute public void run() throws Exception { super.run(); } }

View File

@ -1,23 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the CMSY generator: each annotated setter stores a request
 * parameter into the inherited {@code inputs} map; the getter reads the result back
 * from the inherited {@code outputs} map after {@link #run()} has executed.
 */
@Algorithm(
		statusSupported = true,
		title = "CMSY",
		abstrakt = "An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner.",
		identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY",
		version = "1.1.0")
public class CMSY extends AbstractEcologicalEngineMapper implements IGenerator {

	/** Stores the HTTP link to the stock-priors file under the "IDsFile" input key. */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: IDsFile. Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK",
			defaultValue = "",
			title = "Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK",
			identifier = "IDsFile",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralStringBinding.class)
	public void setIDsFile(String value) {
		inputs.put("IDsFile", value);
	}

	/** Stores the HTTP link to the catch/biomass trends file under the "StocksFile" input key. */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: StocksFile. Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY",
			defaultValue = "",
			title = "Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY",
			identifier = "StocksFile",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralStringBinding.class)
	public void setStocksFile(String value) {
		inputs.put("StocksFile", value);
	}

	/** Stores the identifier of the stock to analyse under the "SelectedStock" input key. */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: SelectedStock. The stock on which the procedure has to focus e.g. HLH_M07",
			defaultValue = "",
			title = "The stock on which the procedure has to focus e.g. HLH_M07",
			identifier = "SelectedStock",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralStringBinding.class)
	public void setSelectedStock(String value) {
		inputs.put("SelectedStock", value);
	}

	/** Exposes the generic, non-predetermined XML output produced by the computation. */
	@ComplexDataOutput(
			abstrakt = "Output that is not predetermined",
			title = "NonDeterministicOutput",
			identifier = "non_deterministic_output",
			binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates the whole execution to the generic mapper. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,32 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the ICCAT_VPA generator: each annotated setter stores a request
 * parameter into the inherited {@code inputs} map; the getter reads the result back
 * from the inherited {@code outputs} map after {@link #run()} has executed.
 */
@Algorithm(
		statusSupported = true,
		title = "ICCAT_VPA",
		abstrakt = "An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr.",
		identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA",
		version = "1.1.0")
public class ICCAT_VPA extends AbstractEcologicalEngineMapper implements IGenerator {

	/** Stores the first year of the dataset temporal extent (stringified). */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: StartYear. First year of the dataset temporal extent",
			defaultValue = "1950",
			title = "First year of the dataset temporal extent",
			identifier = "StartYear",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralIntBinding.class)
	public void setStartYear(Integer value) {
		inputs.put("StartYear", "" + value);
	}

	/** Stores the last year of the dataset temporal extent (stringified). */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: EndYear. Last year of the dataset temporal extent",
			defaultValue = "2013",
			title = "Last year of the dataset temporal extent",
			identifier = "EndYear",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralIntBinding.class)
	public void setEndYear(Integer value) {
		inputs.put("EndYear", "" + value);
	}

	/** Stores the catch-at-age matrix file under the "CAAFile" input key. */
	@ComplexDataInput(
			abstrakt = "Name of the parameter: CAAFile. Catch at Age Matrix (Number of Fish caught by year and for each age)",
			title = "Catch at Age Matrix (Number of Fish caught by year and for each age)",
			maxOccurs = 1,
			minOccurs = 1,
			identifier = "CAAFile",
			binding = D4ScienceDataInputBinding.class)
	public void setCAAFile(GenericFileData inputFile) {
		inputs.put("CAAFile", inputFile);
	}

	/** Stores the partial catch-at-age matrix file under the "PCAAFile" input key. */
	@ComplexDataInput(
			abstrakt = "Name of the parameter: PCAAFile. Partial Catch at Age Matrix (Number of Fish caught by gear and year and for each age)",
			title = "Partial Catch at Age Matrix (Number of Fish caught by gear and year and for each age)",
			maxOccurs = 1,
			minOccurs = 1,
			identifier = "PCAAFile",
			binding = D4ScienceDataInputBinding.class)
	public void setPCAAFile(GenericFileData inputFile) {
		inputs.put("PCAAFile", inputFile);
	}

	/** Stores the CPUE table file under the "CPUEFile" input key. */
	@ComplexDataInput(
			abstrakt = "Name of the parameter: CPUEFile. Table of Catch Per Unit of Effort used in the stock assessment",
			title = "Table of Catch Per Unit of Effort used in the stock assessment",
			maxOccurs = 1,
			minOccurs = 1,
			identifier = "CPUEFile",
			binding = D4ScienceDataInputBinding.class)
	public void setCPUEFile(GenericFileData inputFile) {
		inputs.put("CPUEFile", inputFile);
	}

	/** Stores the partial weight-at-age file under the "PwaaFile" input key. */
	@ComplexDataInput(
			abstrakt = "Name of the parameter: PwaaFile. Partial weight at age (Weight of Fish caught by gear and year and for each age)",
			title = "Partial weight at age (Weight of Fish caught by gear and year and for each age)",
			maxOccurs = 1,
			minOccurs = 1,
			identifier = "PwaaFile",
			binding = D4ScienceDataInputBinding.class)
	public void setPwaaFile(GenericFileData inputFile) {
		inputs.put("PwaaFile", inputFile);
	}

	/** Stores the fecundity-at-age file under the "waaFile" input key. */
	@ComplexDataInput(
			abstrakt = "Name of the parameter: waaFile. Fecundity at age (Fecundity of Fish caught by year and for each age)",
			title = "Fecundity at age (Fecundity of Fish caught by year and for each age)",
			maxOccurs = 1,
			minOccurs = 1,
			identifier = "waaFile",
			binding = D4ScienceDataInputBinding.class)
	public void setwaaFile(GenericFileData inputFile) {
		inputs.put("waaFile", inputFile);
	}

	/** Stores a free-text description of the simulation. */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: shortComment. Free text for users to describe the current simulation",
			defaultValue = " ",
			title = "Free text for users to describe the current simulation",
			identifier = "shortComment",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralStringBinding.class)
	public void setshortComment(String value) {
		inputs.put("shortComment", value);
	}

	/** Stores the number of CPUE time series to use (stringified). */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: nCPUE. Number of Catch Per Unit of Effort Time series to use",
			defaultValue = "7",
			title = "Number of Catch Per Unit of Effort Time series to use",
			identifier = "nCPUE",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralIntBinding.class)
	public void setnCPUE(Integer value) {
		inputs.put("nCPUE", "" + value);
	}

	/** Stores the identifier of the CPUE time series to shrink (stringified). */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: CPUE_cut. Identifier of the Catch Per Unit of Effort Time Serie to be shrunk",
			defaultValue = "1",
			title = "Identifier of the Catch Per Unit of Effort Time Serie to be shrunk",
			identifier = "CPUE_cut",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralIntBinding.class)
	public void setCPUE_cut(Integer value) {
		inputs.put("CPUE_cut", "" + value);
	}

	/** Stores the number of trailing years to remove (stringified). */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: n_remove_year. Number of the (last) years to be removed",
			defaultValue = "1",
			title = "Number of the (last) years to be removed",
			identifier = "n_remove_year",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralIntBinding.class)
	public void setn_remove_year(Integer value) {
		inputs.put("n_remove_year", "" + value);
	}

	/** Stores the maximal age class of catches to consider (stringified). */
	@LiteralDataInput(
			abstrakt = "Name of the parameter: age_plus_group. Maximal age class of catches to be taken into account",
			defaultValue = "10",
			title = "Maximal age class of catches to be taken into account",
			identifier = "age_plus_group",
			maxOccurs = 1,
			minOccurs = 1,
			binding = LiteralIntBinding.class)
	public void setage_plus_group(Integer value) {
		inputs.put("age_plus_group", "" + value);
	}

	/** Exposes the generic, non-predetermined XML output produced by the computation. */
	@ComplexDataOutput(
			abstrakt = "Output that is not predetermined",
			title = "NonDeterministicOutput",
			identifier = "non_deterministic_output",
			binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates the whole execution to the generic mapper. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,26 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
@Algorithm(statusSupported=true, title="ABSENCE_CELLS_FROM_AQUAMAPS", abstrakt="An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS", version = "1.1.0")
/**
 * WPS transducer mapping for the ABSENCE_CELLS_FROM_AQUAMAPS algorithm:
 * produces HCAF cells/features containing absence points derived from an
 * Aquamaps distribution. Parameter values are exchanged through the
 * {@code inputs}/{@code outputs} maps inherited from
 * AbstractEcologicalEngineMapper.
 */
public class ABSENCE_CELLS_FROM_AQUAMAPS extends AbstractEcologicalEngineMapper implements ITransducer {

    /** Label of the filtered HCAF table to generate. */
    @LiteralDataInput(abstrakt="Name of the parameter: Table_Label. the name of the Filtered Hcaf", defaultValue="AbsenceCells_", title="the name of the Filtered Hcaf", identifier = "Table_Label", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setTable_Label(String data) {
        inputs.put("Table_Label", data);
    }

    /** Aquamaps HSPEC table from which the absence points are produced. */
    @ComplexDataInput(abstrakt="Name of the parameter: Aquamaps_HSPEC. an Aquamaps table from which to produce the absence points [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", title="an Aquamaps table from which to produce the absence points [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", maxOccurs=1, minOccurs=1, identifier = "Aquamaps_HSPEC", binding = GenericFileDataBinding.class)
    public void setAquamaps_HSPEC(GenericFileData file) {
        inputs.put("Aquamaps_HSPEC", file);
    }

    /** Flag: take points randomly (true) or close together (false). */
    @LiteralDataInput(abstrakt="Name of the parameter: Take_Randomly. a flag for taking points randomly (true) or close together (false)", defaultValue="true", allowedValues= {"true","false"}, title="a flag for taking points randomly (true) or close together (false)", identifier = "Take_Randomly", maxOccurs=1, minOccurs=1, binding = LiteralBooleanBinding.class)
    public void setTake_Randomly(Boolean data) {
        inputs.put("Take_Randomly", "" + data);
    }

    /** Number of absence points to take. */
    @LiteralDataInput(abstrakt="Name of the parameter: Number_of_Points. number of points to take", defaultValue="20", title="number of points to take", identifier = "Number_of_Points", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class)
    public void setNumber_of_Points(Integer data) {
        inputs.put("Number_of_Points", "" + data);
    }

    /** Species code following the Fish-Base conventions. */
    @LiteralDataInput(abstrakt="Name of the parameter: Species_Code. the species code according to the Fish-Base conventions", defaultValue="Fis-30189", title="the species code according to the Fish-Base conventions", identifier = "Species_Code", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setSpecies_Code(String data) {
        inputs.put("Species_Code", data);
    }

    /** Fetches the produced HCAF table as CSV from the URL stored under "OutputTable". */
    @ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. a HCAF table containing Absence Points cells [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM]", title="a HCAF table containing Absence Points cells [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM]", identifier = "OutputTable", binding = CsvFileDataBinding.class)
    public GenericFileData getOutputTable() {
        try {
            URL tableUrl = new URL((String) outputs.get("OutputTable"));
            return new GenericFileData(tableUrl.openStream(), "text/csv");
        } catch (Exception e) {
            // NOTE(review): generated-code convention — failures are printed to
            // stderr and mapped to a null output instead of being propagated.
            e.printStackTrace();
            return null;
        }
    }

    /** Returns the algorithm's free-form XML output, if any was produced. */
    @ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** WPS execution entry point; delegates to the shared mapper logic. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,42 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
@Algorithm(statusSupported=true, title="BIONYM_LOCAL", abstrakt="A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL", version = "1.1.0")
/**
 * WPS transducer mapping for the BIONYM_LOCAL algorithm: a fast, local
 * version of the BiOnym taxon-name-matching workflow. Up to five matchers
 * can be chained, each with its own threshold and maximum-results limit.
 * Parameter values are exchanged through the {@code inputs}/{@code outputs}
 * maps inherited from AbstractEcologicalEngineMapper.
 */
public class BIONYM_LOCAL extends AbstractEcologicalEngineMapper implements ITransducer {

    /** Raw species name, optionally with authorship, to be matched. */
    @LiteralDataInput(abstrakt="Name of the parameter: SpeciesAuthorName. The scientific name of the species, possibly with authorship", defaultValue="Gadus morhua (Linnaeus, 1758)", title="The scientific name of the species, possibly with authorship", identifier = "SpeciesAuthorName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setSpeciesAuthorName(String data) {
        inputs.put("SpeciesAuthorName", data);
    }

    /** Reference taxa dataset used for the matching. */
    @LiteralDataInput(abstrakt="Name of the parameter: Taxa_Authority_File. The reference dataset to use", allowedValues= {"ASFIS","FISHBASE","OBIS","OBIS_ANIMALIA","OBIS_CNIDARIA","OBIS_ECHINODERMATA","OBIS_PLATYHELMINTHES","COL_FULL","COL_CHORDATA","COL_MAMMALIA","IRMNG_ACTINOPTERYGII","WORMS_ANIMALIA","WORMS_PISCES"}, defaultValue="FISHBASE", title="The reference dataset to use", identifier = "Taxa_Authority_File", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setTaxa_Authority_File(String data) {
        inputs.put("Taxa_Authority_File", data);
    }

    /** Parser used to separate the species and authority parts of the name. */
    @LiteralDataInput(abstrakt="Name of the parameter: Parser_Name. The Species - Authority parser", allowedValues= {"SIMPLE","GNI","NONE"}, defaultValue="SIMPLE", title="The Species - Authority parser", identifier = "Parser_Name", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setParser_Name(String data) {
        inputs.put("Parser_Name", data);
    }

    /** Whether pre-parsing rules are applied to correct common errors. */
    @LiteralDataInput(abstrakt="Name of the parameter: Activate_Preparsing_Processing. Use preparsing rules to correct common errors", defaultValue="true", allowedValues= {"true","false"}, title="Use preparsing rules to correct common errors", identifier = "Activate_Preparsing_Processing", maxOccurs=1, minOccurs=1, binding = LiteralBooleanBinding.class)
    public void setActivate_Preparsing_Processing(Boolean data) {
        inputs.put("Activate_Preparsing_Processing", "" + data);
    }

    /** Whether genus and species names are processed without declension (stemmed). */
    @LiteralDataInput(abstrakt="Name of the parameter: Use_Stemmed_Genus_and_Species. Process using Genus and Species names without declension", defaultValue="false", allowedValues= {"true","false"}, title="Process using Genus and Species names without declension", identifier = "Use_Stemmed_Genus_and_Species", maxOccurs=1, minOccurs=1, binding = LiteralBooleanBinding.class)
    public void setUse_Stemmed_Genus_and_Species(Boolean data) {
        inputs.put("Use_Stemmed_Genus_and_Species", "" + data);
    }

    /** Trade-off between recognition speed and accuracy. */
    @LiteralDataInput(abstrakt="Name of the parameter: Accuracy_vs_Speed. A trade-off between recognition speed and accuracy. Max speed corresponds to search for strings with the same length only.", allowedValues= {"MAX_ACCURACY","LOW_SPEED","MEDIUM_SPEED","HIGH_SPEED","MAX_SPEED"}, defaultValue="MAX_ACCURACY", title="A trade-off between recognition speed and accuracy. Max speed corresponds to search for strings with the same length only.", identifier = "Accuracy_vs_Speed", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setAccuracy_vs_Speed(String data) {
        inputs.put("Accuracy_vs_Speed", data);
    }

    /** First matcher of the chain. */
    @LiteralDataInput(abstrakt="Name of the parameter: Matcher_1. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="GSAy", title="Choose a Matcher", identifier = "Matcher_1", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setMatcher_1(String data) {
        inputs.put("Matcher_1", data);
    }

    /** Acceptance threshold for matcher 1. */
    @LiteralDataInput(abstrakt="Name of the parameter: Threshold_1. Threshold", defaultValue="0.6", title="Threshold", identifier = "Threshold_1", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setThreshold_1(Double data) {
        inputs.put("Threshold_1", "" + data);
    }

    /** Maximum candidates returned by matcher 1 per raw input name. */
    @LiteralDataInput(abstrakt="Name of the parameter: MaxResults_1. The maximum number of matching candidates per each raw input species", defaultValue="10", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_1", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class)
    public void setMaxResults_1(Integer data) {
        inputs.put("MaxResults_1", "" + data);
    }

    /** Second matcher of the chain. */
    @LiteralDataInput(abstrakt="Name of the parameter: Matcher_2. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="FUZZYMATCH", title="Choose a Matcher", identifier = "Matcher_2", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setMatcher_2(String data) {
        inputs.put("Matcher_2", data);
    }

    /** Acceptance threshold for matcher 2. */
    @LiteralDataInput(abstrakt="Name of the parameter: Threshold_2. Threshold", defaultValue="0.6", title="Threshold", identifier = "Threshold_2", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setThreshold_2(Double data) {
        inputs.put("Threshold_2", "" + data);
    }

    /** Maximum candidates returned by matcher 2 per raw input name. */
    @LiteralDataInput(abstrakt="Name of the parameter: MaxResults_2. The maximum number of matching candidates per each raw input species", defaultValue="10", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_2", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class)
    public void setMaxResults_2(Integer data) {
        inputs.put("MaxResults_2", "" + data);
    }

    /** Third matcher of the chain. */
    @LiteralDataInput(abstrakt="Name of the parameter: Matcher_3. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="LEVENSHTEIN", title="Choose a Matcher", identifier = "Matcher_3", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setMatcher_3(String data) {
        inputs.put("Matcher_3", data);
    }

    /** Acceptance threshold for matcher 3. */
    @LiteralDataInput(abstrakt="Name of the parameter: Threshold_3. Threshold", defaultValue="0.4", title="Threshold", identifier = "Threshold_3", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setThreshold_3(Double data) {
        inputs.put("Threshold_3", "" + data);
    }

    /** Maximum candidates returned by matcher 3 per raw input name. */
    @LiteralDataInput(abstrakt="Name of the parameter: MaxResults_3. The maximum number of matching candidates per each raw input species", defaultValue="5", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_3", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class)
    public void setMaxResults_3(Integer data) {
        inputs.put("MaxResults_3", "" + data);
    }

    /** Fourth matcher of the chain. */
    @LiteralDataInput(abstrakt="Name of the parameter: Matcher_4. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="TRIGRAM", title="Choose a Matcher", identifier = "Matcher_4", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setMatcher_4(String data) {
        inputs.put("Matcher_4", data);
    }

    /** Acceptance threshold for matcher 4. */
    @LiteralDataInput(abstrakt="Name of the parameter: Threshold_4. Threshold", defaultValue="0.4", title="Threshold", identifier = "Threshold_4", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setThreshold_4(Double data) {
        inputs.put("Threshold_4", "" + data);
    }

    /** Maximum candidates returned by matcher 4 per raw input name. */
    @LiteralDataInput(abstrakt="Name of the parameter: MaxResults_4. The maximum number of matching candidates per each raw input species", defaultValue="5", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_4", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class)
    public void setMaxResults_4(Integer data) {
        inputs.put("MaxResults_4", "" + data);
    }

    /** Fifth (optional) matcher of the chain; NONE disables it. */
    @LiteralDataInput(abstrakt="Name of the parameter: Matcher_5. Choose a Matcher (Optional)", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="NONE", title="Choose a Matcher (Optional)", identifier = "Matcher_5", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setMatcher_5(String data) {
        inputs.put("Matcher_5", data);
    }

    /** Acceptance threshold for matcher 5. */
    @LiteralDataInput(abstrakt="Name of the parameter: Threshold_5. Threshold (def. 0.2)", defaultValue="0.2", title="Threshold (def. 0.2)", identifier = "Threshold_5", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setThreshold_5(Double data) {
        inputs.put("Threshold_5", "" + data);
    }

    /** Maximum candidates returned by matcher 5 per raw input name. */
    @LiteralDataInput(abstrakt="Name of the parameter: MaxResults_5. The maximum number of matching candidates per each raw input species", defaultValue="0", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_5", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class)
    public void setMaxResults_5(Integer data) {
        inputs.put("MaxResults_5", "" + data);
    }

    /** Returns the algorithm's free-form XML output, if any was produced. */
    @ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** WPS execution entry point; delegates to the shared mapper logic. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,26 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
@Algorithm(statusSupported=true, title="CSQUARE_COLUMN_CREATOR", abstrakt="An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR", version = "1.1.0")
/**
 * WPS transducer mapping for the CSQUARE_COLUMN_CREATOR algorithm: adds a
 * column of CSquare codes computed from longitude/latitude columns of an
 * input table. Parameter values are exchanged through the
 * {@code inputs}/{@code outputs} maps inherited from
 * AbstractEcologicalEngineMapper.
 */
public class CSQUARE_COLUMN_CREATOR extends AbstractEcologicalEngineMapper implements ITransducer {

    /** Input table to which the CSquare column is added. */
    @ComplexDataInput(abstrakt="Name of the parameter: InputTable. The table to which the algorithm adds the csquare column [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="The table to which the algorithm adds the csquare column [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "InputTable", binding = GenericFileDataBinding.class)
    public void setInputTable(GenericFileData file) {
        inputs.put("InputTable", file);
    }

    /** Name of the column holding longitude values. */
    @LiteralDataInput(abstrakt="Name of the parameter: Longitude_Column. The column containing Longitude information [the name of a column from InputTable]", defaultValue="x", title="The column containing Longitude information [the name of a column from InputTable]", identifier = "Longitude_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setLongitude_Column(String data) {
        inputs.put("Longitude_Column", data);
    }

    /** Name of the column holding latitude values. */
    @LiteralDataInput(abstrakt="Name of the parameter: Latitude_Column. The column containing Latitude information [the name of a column from InputTable]", defaultValue="y", title="The column containing Latitude information [the name of a column from InputTable]", identifier = "Latitude_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setLatitude_Column(String data) {
        inputs.put("Latitude_Column", data);
    }

    /** Resolution of the generated CSquare codes. */
    @LiteralDataInput(abstrakt="Name of the parameter: CSquare_Resolution. The resolution of the CSquare codes", defaultValue="0.1", title="The resolution of the CSquare codes", identifier = "CSquare_Resolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setCSquare_Resolution(Double data) {
        inputs.put("CSquare_Resolution", "" + data);
    }

    /** Name of the output table to create. */
    @LiteralDataInput(abstrakt="Name of the parameter: OutputTableName. The name of the output table", defaultValue="csquaretbl_", title="The name of the output table", identifier = "OutputTableName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setOutputTableName(String data) {
        inputs.put("OutputTableName", data);
    }

    /** Fetches the produced table as CSV from the URL stored under "OutputTable". */
    @ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", identifier = "OutputTable", binding = CsvFileDataBinding.class)
    public GenericFileData getOutputTable() {
        try {
            URL tableUrl = new URL((String) outputs.get("OutputTable"));
            return new GenericFileData(tableUrl.openStream(), "text/csv");
        } catch (Exception e) {
            // NOTE(review): generated-code convention — failures are printed to
            // stderr and mapped to a null output instead of being propagated.
            e.printStackTrace();
            return null;
        }
    }

    /** Returns the algorithm's free-form XML output, if any was produced. */
    @ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** WPS execution entry point; delegates to the shared mapper logic. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,30 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
@Algorithm(statusSupported=true, title="ESRI_GRID_EXTRACTION", abstrakt="An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION", version = "1.1.0")
/**
 * WPS transducer mapping for the ESRI_GRID_EXTRACTION algorithm: samples an
 * environmental layer (NETCDF, ASC, GeoTiff, WFS, WCS, ...) on a user-defined
 * grid inside a bounding box and returns the values as an ESRI GRID ASCII
 * file. Parameter values are exchanged through the {@code inputs}/{@code outputs}
 * maps inherited from AbstractEcologicalEngineMapper.
 */
public class ESRI_GRID_EXTRACTION extends AbstractEcologicalEngineMapper implements ITransducer {

    /** Layer title, UUID (from GeoNetwork/GeoExplorer) or direct HTTP link. */
    @LiteralDataInput(abstrakt="Name of the parameter: Layer. Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", defaultValue="", title="Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", identifier = "Layer", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setLayer(String data) {
        inputs.put("Layer", data);
    }

    /** Lower-left latitude of the bounding box. */
    @LiteralDataInput(abstrakt="Name of the parameter: BBox_LowerLeftLat. Lower Left Latitute of the Bounding Box", defaultValue="-60", title="Lower Left Latitute of the Bounding Box", identifier = "BBox_LowerLeftLat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setBBox_LowerLeftLat(Double data) {
        inputs.put("BBox_LowerLeftLat", "" + data);
    }

    /** Lower-left longitude of the bounding box. */
    @LiteralDataInput(abstrakt="Name of the parameter: BBox_LowerLeftLong. Lower Left Longitude of the Bounding Box", defaultValue="-50", title="Lower Left Longitude of the Bounding Box", identifier = "BBox_LowerLeftLong", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setBBox_LowerLeftLong(Double data) {
        inputs.put("BBox_LowerLeftLong", "" + data);
    }

    /** Upper-right latitude of the bounding box. */
    @LiteralDataInput(abstrakt="Name of the parameter: BBox_UpperRightLat. Upper Right Latitute of the Bounding Box", defaultValue="60", title="Upper Right Latitute of the Bounding Box", identifier = "BBox_UpperRightLat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setBBox_UpperRightLat(Double data) {
        inputs.put("BBox_UpperRightLat", "" + data);
    }

    /** Upper-right longitude of the bounding box. */
    @LiteralDataInput(abstrakt="Name of the parameter: BBox_UpperRightLong. Upper Right Longitude of the Bounding Box", defaultValue="50", title="Upper Right Longitude of the Bounding Box", identifier = "BBox_UpperRightLong", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setBBox_UpperRightLong(Double data) {
        inputs.put("BBox_UpperRightLong", "" + data);
    }

    /** Z value; 0 means surface level or the first available Z in the layer. */
    @LiteralDataInput(abstrakt="Name of the parameter: Z. Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", defaultValue="0", title="Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setZ(Double data) {
        inputs.put("Z", "" + data);
    }

    /** Time index; 0 selects the first time-indexed dataset. */
    @LiteralDataInput(abstrakt="Name of the parameter: TimeIndex. Time Index. The default is the first time indexed dataset", defaultValue="0", title="Time Index. The default is the first time indexed dataset", identifier = "TimeIndex", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class)
    public void setTimeIndex(Integer data) {
        inputs.put("TimeIndex", "" + data);
    }

    /** Projection resolution on the X axis. */
    @LiteralDataInput(abstrakt="Name of the parameter: XResolution. Projection resolution on the X axis", defaultValue="0.5", title="Projection resolution on the X axis", identifier = "XResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setXResolution(Double data) {
        inputs.put("XResolution", "" + data);
    }

    /** Projection resolution on the Y axis. */
    @LiteralDataInput(abstrakt="Name of the parameter: YResolution. Projection resolution on the Y axis", defaultValue="0.5", title="Projection resolution on the Y axis", identifier = "YResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class)
    public void setYResolution(Double data) {
        inputs.put("YResolution", "" + data);
    }

    /** Fetches the produced ESRI GRID ASCII file from the stored output URL. */
    @ComplexDataOutput(abstrakt="Name of the parameter: Output ESRI GRID ASCII FILE. Output ESRI GRID ASCII FILE", title="Output ESRI GRID ASCII FILE", identifier = "Output ESRI GRID ASCII FILE", binding = D4ScienceFileDataBinding.class)
    public GenericFileData getOutput_ESRI_GRID_ASCII_FILE() {
        try {
            URL fileUrl = new URL((String) outputs.get("Output ESRI GRID ASCII FILE"));
            return new GenericFileData(fileUrl.openStream(), "application/d4science");
        } catch (Exception e) {
            // NOTE(review): generated-code convention — failures are printed to
            // stderr and mapped to a null output instead of being propagated.
            e.printStackTrace();
            return null;
        }
    }

    /** Returns the algorithm's free-form XML output, if any was produced. */
    @ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** WPS execution entry point; delegates to the shared mapper logic. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,26 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
@Algorithm(statusSupported=true, title="FAO_OCEAN_AREA_COLUMN_CREATOR", abstrakt="An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR", version = "1.1.0")
/**
 * WPS transducer mapping for the FAO_OCEAN_AREA_COLUMN_CREATOR algorithm:
 * adds a column of FAO Ocean Area codes computed from longitude/latitude
 * columns of an input table. Parameter values are exchanged through the
 * {@code inputs}/{@code outputs} maps inherited from
 * AbstractEcologicalEngineMapper.
 */
public class FAO_OCEAN_AREA_COLUMN_CREATOR extends AbstractEcologicalEngineMapper implements ITransducer {

    /** Input table to which the FAO Ocean Area column is added. */
    @ComplexDataInput(abstrakt="Name of the parameter: InputTable. The table to which the algorithm adds the csquare column [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="The table to which the algorithm adds the csquare column [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "InputTable", binding = GenericFileDataBinding.class)
    public void setInputTable(GenericFileData file) {
        inputs.put("InputTable", file);
    }

    /** Name of the column holding longitude values. */
    @LiteralDataInput(abstrakt="Name of the parameter: Longitude_Column. The column containing Longitude information [the name of a column from InputTable]", defaultValue="x", title="The column containing Longitude information [the name of a column from InputTable]", identifier = "Longitude_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setLongitude_Column(String data) {
        inputs.put("Longitude_Column", data);
    }

    /** Name of the column holding latitude values. */
    @LiteralDataInput(abstrakt="Name of the parameter: Latitude_Column. The column containing Latitude information [the name of a column from InputTable]", defaultValue="y", title="The column containing Latitude information [the name of a column from InputTable]", identifier = "Latitude_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setLatitude_Column(String data) {
        inputs.put("Latitude_Column", data);
    }

    /** Resolution of the generated FAO Ocean Area codes. */
    @LiteralDataInput(abstrakt="Name of the parameter: Resolution. The resolution of the FAO Ocean Area codes", defaultValue="5", title="The resolution of the FAO Ocean Area codes", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class)
    public void setResolution(Integer data) {
        inputs.put("Resolution", "" + data);
    }

    /** Name of the output table to create. */
    @LiteralDataInput(abstrakt="Name of the parameter: OutputTableName. The name of the output table", defaultValue="faooceanarea_", title="The name of the output table", identifier = "OutputTableName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
    public void setOutputTableName(String data) {
        inputs.put("OutputTableName", data);
    }

    /** Fetches the produced table as CSV from the URL stored under "OutputTable". */
    @ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", identifier = "OutputTable", binding = CsvFileDataBinding.class)
    public GenericFileData getOutputTable() {
        try {
            URL tableUrl = new URL((String) outputs.get("OutputTable"));
            return new GenericFileData(tableUrl.openStream(), "text/csv");
        } catch (Exception e) {
            // NOTE(review): generated-code convention — failures are printed to
            // stderr and mapped to a null output instead of being propagated.
            e.printStackTrace();
            return null;
        }
    }

    /** Returns the algorithm's free-form XML output, if any was produced. */
    @ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** WPS execution entry point; delegates to the shared mapper logic. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,27 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
@Algorithm(statusSupported=true, title="FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT", abstrakt="An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT", version = "1.1.0")
public class FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT extends AbstractEcologicalEngineMapper implements ITransducer{
/** Stores the input table to which the FAO Ocean Area column is added. */
@ComplexDataInput(abstrakt="Name of the parameter: InputTable. The table to which the algorithm adds the csquare column [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="The table to which the algorithm adds the csquare column [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "InputTable", binding = GenericFileDataBinding.class)
public void setInputTable(GenericFileData file) {
    inputs.put("InputTable", file);
}
/** Stores the name of the column holding longitude values. */
@LiteralDataInput(abstrakt="Name of the parameter: Longitude_Column. The column containing Longitude information [the name of a column from InputTable]", defaultValue="x", title="The column containing Longitude information [the name of a column from InputTable]", identifier = "Longitude_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
public void setLongitude_Column(String data) {
    inputs.put("Longitude_Column", data);
}
/** Stores the name of the column holding latitude values. */
@LiteralDataInput(abstrakt="Name of the parameter: Latitude_Column. The column containing Latitude information [the name of a column from InputTable]", defaultValue="y", title="The column containing Latitude information [the name of a column from InputTable]", identifier = "Latitude_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class)
public void setLatitude_Column(String data) {
    inputs.put("Latitude_Column", data);
}
@LiteralDataInput(abstrakt="Name of the parameter: Quadrant_Column. The column containing Quadrant information [the name of a column from InputTable]", defaultValue="quadrant", title="The column containing Quadrant information [the name of a column from InputTable]", identifier = "Quadrant_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setQuadrant_Column(String data) {inputs.put("Quadrant_Column",data);}
@LiteralDataInput(abstrakt="Name of the parameter: Resolution. The resolution of the FAO Ocean Area codes", defaultValue="5", title="The resolution of the FAO Ocean Area codes", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setResolution(Integer data) {inputs.put("Resolution",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableName. The name of the output table", defaultValue="faooceanarea_", title="The name of the output table", identifier = "OutputTableName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableName(String data) {inputs.put("OutputTableName",data);}
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
@Execute public void run() throws Exception { super.run(); } }

View File

@ -1,25 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS mapper for the GENERIC_CHARTS transducer: per its declared contract, it
 * produces charts (histograms, scattering, radar, gaussian summary) of
 * attributes vs. quantities from an input table.
 */
@Algorithm(
        statusSupported = true,
        title = "GENERIC_CHARTS",
        abstrakt = "An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities.",
        identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS",
        version = "1.1.0")
public class GENERIC_CHARTS extends AbstractEcologicalEngineMapper implements ITransducer {

    /** Stores the input table reference under the "InputTable" key. */
    @ComplexDataInput(
            abstrakt = "Name of the parameter: InputTable. The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]",
            title = "The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]",
            maxOccurs = 1, minOccurs = 1,
            identifier = "InputTable",
            binding = GenericFileDataBinding.class)
    public void setInputTable(GenericFileData tableFile) {
        inputs.put("InputTable", tableFile);
    }

    /** Stores the number of top elements to chart; kept as a string like the other inputs. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: TopElementsNumber. Max number of elements, with highest values, to visualize",
            defaultValue = "10",
            title = "Max number of elements, with highest values, to visualize",
            identifier = "TopElementsNumber",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralIntBinding.class)
    public void setTopElementsNumber(Integer value) {
        inputs.put("TopElementsNumber", "" + value);
    }

    /** Stores the pipe-separated list of attribute columns to chart. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Attributes. The dimensions to consider in the charts [a sequence of names of columns from InputTable separated by | ]",
            defaultValue = "",
            title = "The dimensions to consider in the charts [a sequence of names of columns from InputTable separated by | ]",
            identifier = "Attributes",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setAttributes(String value) {
        inputs.put("Attributes", value);
    }

    /** Stores the pipe-separated list of numeric quantity columns to chart. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Quantities. The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]",
            defaultValue = "",
            title = "The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]",
            identifier = "Quantities",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setQuantities(String value) {
        inputs.put("Quantities", value);
    }

    /** Exposes any result the computation produced that is not declared above. */
    @ComplexDataOutput(
            abstrakt = "Output that is not predetermined",
            title = "NonDeterministicOutput",
            identifier = "non_deterministic_output",
            binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** Delegates execution to the generic ecological-engine mapper. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,25 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS mapper for the GEO_CHART transducer: per its declared contract, it
 * renders per-country color maps where the color encodes the sum of the
 * values recorded in each country.
 */
@Algorithm(
        statusSupported = true,
        title = "GEO_CHART",
        abstrakt = "An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country.",
        identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART",
        version = "1.1.0")
public class GEO_CHART extends AbstractEcologicalEngineMapper implements ITransducer {

    /** Stores the input table reference under the "InputTable" key. */
    @ComplexDataInput(
            abstrakt = "Name of the parameter: InputTable. The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]",
            title = "The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]",
            maxOccurs = 1, minOccurs = 1,
            identifier = "InputTable",
            binding = GenericFileDataBinding.class)
    public void setInputTable(GenericFileData tableFile) {
        inputs.put("InputTable", tableFile);
    }

    /** Stores the name of the longitude column of the input table. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Longitude. The column containing longitude decimal values [the name of a column from InputTable]",
            defaultValue = "long",
            title = "The column containing longitude decimal values [the name of a column from InputTable]",
            identifier = "Longitude",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setLongitude(String value) {
        inputs.put("Longitude", value);
    }

    /** Stores the name of the latitude column of the input table. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Latitude. The column containing latitude decimal values [the name of a column from InputTable]",
            defaultValue = "lat",
            title = "The column containing latitude decimal values [the name of a column from InputTable]",
            identifier = "Latitude",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setLatitude(String value) {
        inputs.put("Latitude", value);
    }

    /** Stores the pipe-separated list of numeric quantity columns to visualize. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Quantities. The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]",
            defaultValue = "",
            title = "The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]",
            identifier = "Quantities",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setQuantities(String value) {
        inputs.put("Quantities", value);
    }

    /** Exposes any result the computation produced that is not declared above. */
    @ComplexDataOutput(
            abstrakt = "Output that is not predetermined",
            title = "NonDeterministicOutput",
            identifier = "non_deterministic_output",
            binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** Delegates execution to the generic ecological-engine mapper. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,26 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS mapper for the HCAF_FILTER transducer: per its declared contract, it
 * produces an HCAF table restricted to a user-selected bounding box
 * (the defaults outline Indonesia).
 */
@Algorithm(
        statusSupported = true,
        title = "HCAF_FILTER",
        abstrakt = "An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia)",
        identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER",
        version = "1.1.0")
public class HCAF_FILTER extends AbstractEcologicalEngineMapper implements ITransducer {

    /** Stores the label to use for the filtered HCAF table. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Table_Label. the name of the Filtered Hcaf",
            defaultValue = "hcaf_filtered",
            title = "the name of the Filtered Hcaf",
            identifier = "Table_Label",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setTable_Label(String value) {
        inputs.put("Table_Label", value);
    }

    /** Stores the lower-left latitude of the bounding box; kept as a string like the other inputs. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: B_Box_Left_Lower_Lat. the left lower latitude of the bounding box (range [-90,+90])",
            defaultValue = "-17.098",
            title = "the left lower latitude of the bounding box (range [-90,+90])",
            identifier = "B_Box_Left_Lower_Lat",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralDoubleBinding.class)
    public void setB_Box_Left_Lower_Lat(Double value) {
        inputs.put("B_Box_Left_Lower_Lat", "" + value);
    }

    /** Stores the lower-left longitude of the bounding box. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: B_Box_Left_Lower_Long. the left lower longitude of the bounding box (range [-180,+180])",
            defaultValue = "89.245",
            title = "the left lower longitude of the bounding box (range [-180,+180])",
            identifier = "B_Box_Left_Lower_Long",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralDoubleBinding.class)
    public void setB_Box_Left_Lower_Long(Double value) {
        inputs.put("B_Box_Left_Lower_Long", "" + value);
    }

    /** Stores the upper-right latitude of the bounding box. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: B_Box_Right_Upper_Lat. the right upper latitude of the bounding box (range [-90,+90])",
            defaultValue = "25.086",
            title = "the right upper latitude of the bounding box (range [-90,+90])",
            identifier = "B_Box_Right_Upper_Lat",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralDoubleBinding.class)
    public void setB_Box_Right_Upper_Lat(Double value) {
        inputs.put("B_Box_Right_Upper_Lat", "" + value);
    }

    /** Stores the upper-right longitude of the bounding box. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: B_Box_Right_Upper_Long. the right upper longitude of the bounding box (range [-180,+180])",
            defaultValue = "147.642",
            title = "the right upper longitude of the bounding box (range [-180,+180])",
            identifier = "B_Box_Right_Upper_Long",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralDoubleBinding.class)
    public void setB_Box_Right_Upper_Long(Double value) {
        inputs.put("B_Box_Right_Upper_Long", "" + value);
    }

    /**
     * Opens the "OutputTable" result URL and wraps its stream as a CSV file.
     * On any failure the stack trace is printed and null is returned,
     * matching the error convention of the sibling mapper classes.
     */
    @ComplexDataOutput(
            abstrakt = "Name of the parameter: OutputTable. a HCAF table focusing on the selected Bounding Box [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM]",
            title = "a HCAF table focusing on the selected Bounding Box [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM]",
            identifier = "OutputTable",
            binding = CsvFileDataBinding.class)
    public GenericFileData getOutputTable() {
        try {
            URL resultUrl = new URL((String) outputs.get("OutputTable"));
            return new GenericFileData(resultUrl.openStream(), "text/csv");
        } catch (Exception ex) {
            ex.printStackTrace();
            return null;
        }
    }

    /** Exposes any result the computation produced that is not declared above. */
    @ComplexDataOutput(
            abstrakt = "Output that is not predetermined",
            title = "NonDeterministicOutput",
            identifier = "non_deterministic_output",
            binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** Delegates execution to the generic ecological-engine mapper. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,39 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
// WPS mapper for the MAX_ENT_NICHE_MODELLING transducer. Per the annotation
// metadata below, it wraps a MaxEnt species-habitat model (Shapire et al.
// v 3.3.3k): inputs are an occurrence table plus environmental layers, and
// outputs include a threshold, prevalence, variable importances and tables.
// The annotation strings ARE the published WPS contract — do not edit them.
@Algorithm(statusSupported=true, title="MAX_ENT_NICHE_MODELLING", abstrakt="A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING", version = "1.1.0")
public class MAX_ENT_NICHE_MODELLING extends AbstractEcologicalEngineMapper implements ITransducer{
// Each setter below registers one WPS input in the inherited `inputs` map,
// keyed by its annotation `identifier`; numeric values are stored as strings.
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="maxent_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
@LiteralDataInput(abstrakt="Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to", defaultValue="generic_species", title="The name of the species to model and the occurrence records refer to", identifier = "SpeciesName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSpeciesName(String data) {inputs.put("SpeciesName",data);}
@LiteralDataInput(abstrakt="Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm", defaultValue="1000", title="The number of learning iterations of the MaxEnt algorithm", identifier = "MaxIterations", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxIterations(Integer data) {inputs.put("MaxIterations",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points", defaultValue="0.5", title="A priori probability of presence at ordinary occurrence points", identifier = "DefaultPrevalence", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setDefaultPrevalence(Double data) {inputs.put("DefaultPrevalence",""+data);}
@ComplexDataInput(abstrakt="Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5]", title="A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5]", maxOccurs=1, minOccurs=1, identifier = "OccurrencesTable", binding = GenericFileDataBinding.class) public void setOccurrencesTable(GenericFileData file) {inputs.put("OccurrencesTable",file);}
@LiteralDataInput(abstrakt="Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable]", defaultValue="decimallongitude", title="The column containing longitude values [the name of a column from OccurrencesTable]", identifier = "LongitudeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLongitudeColumn(String data) {inputs.put("LongitudeColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable]", defaultValue="decimallatitude", title="The column containing latitude values [the name of a column from OccurrencesTable]", identifier = "LatitudeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLatitudeColumn(String data) {inputs.put("LatitudeColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees", defaultValue="1", title="Model projection resolution on the X axis in decimal degrees", identifier = "XResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setXResolution(Double data) {inputs.put("XResolution",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees", defaultValue="1", title="Model projection resolution on the Y axis in decimal degrees", identifier = "YResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setYResolution(Double data) {inputs.put("YResolution",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String)", defaultValue="", title="The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String)", identifier = "Layers", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayers(String data) {inputs.put("Layers",data);}
@LiteralDataInput(abstrakt="Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer", defaultValue="0", title="Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setZ(Double data) {inputs.put("Z",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets", defaultValue="0", title="Time Index. The default is the first time indexed in the input environmental datasets", identifier = "TimeIndex", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex(Integer data) {inputs.put("TimeIndex",""+data);}
// Each getter below reads one result from the inherited `outputs` map.
// Note: output identifiers here contain spaces (e.g. "Best Threshold") —
// they must match the keys the computation writes into `outputs`.
@LiteralDataOutput(abstrakt="Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments", title="Best threshold for transforming MaxEnt values into 0/1 probability assignments", identifier = "Best Threshold", binding = LiteralStringBinding.class) public String getBest_Threshold() {return (String) outputs.get("Best Threshold");}
@LiteralDataOutput(abstrakt="Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species", title="The a posteriori estimated prevalence of the species", identifier = "Estimated Prevalence", binding = LiteralStringBinding.class) public String getEstimated_Prevalence() {return (String) outputs.get("Estimated Prevalence");}
@LiteralDataOutput(abstrakt="Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates", title="The contribution of each variable to the MaxEnt values estimates", identifier = "Variables contributions", binding = LiteralStringBinding.class) public String getVariables_contributions() {return (String) outputs.get("Variables contributions");}
@LiteralDataOutput(abstrakt="Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training", title="The importance of the permutations of the variables during the training", identifier = "Variables Permutations Importance", binding = LiteralStringBinding.class) public String getVariables_Permutations_Importance() {return (String) outputs.get("Variables Permutations Importance");}
// File-backed outputs: the stored value is a URL string; the getter opens the
// stream and wraps it. On failure the stack trace is printed and null returned.
@ComplexDataOutput(abstrakt="Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments", title="ASCII Maps of the environmental layers for checking features aligments", identifier = "ASCII Maps of the environmental layers for checking features aligments", binding = D4ScienceFileDataBinding.class) public GenericFileData getASCII_Maps_of_the_environmental_layers_for_checking_features_aligments() {URL url=null;try {url = new URL((String) outputs.get("ASCII Maps of the environmental layers for checking features aligments")); return new GenericFileData(url.openStream(),"application/d4science");} catch (Exception e) {e.printStackTrace();return null;}}
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", identifier = "OutputTable7", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable7() {URL url=null;try {url = new URL((String) outputs.get("OutputTable7")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
// Execution is fully delegated to the generic ecological-engine mapper.
@Execute public void run() throws Exception { super.run(); } }

View File

@ -1,23 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS mapper for the OBIS_MOST_OBSERVED_SPECIES transducer: per its declared
 * contract, it produces a bar chart of the most observed species within a
 * years range, based on the OBIS database.
 */
@Algorithm(
        statusSupported = true,
        title = "OBIS_MOST_OBSERVED_SPECIES",
        abstrakt = "An algorithm producing a bar chart for the most observed species in a certain years range (with respect to the OBIS database)",
        identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES",
        version = "1.1.0")
public class OBIS_MOST_OBSERVED_SPECIES extends AbstractEcologicalEngineMapper implements ITransducer {

    /** Stores the number of species to report. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Species_number. Number of species to report (max 17 will be visualized on the chart)",
            defaultValue = "10",
            title = "Number of species to report (max 17 will be visualized on the chart)",
            identifier = "Species_number",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setSpecies_number(String value) {
        inputs.put("Species_number", value);
    }

    /** Stores the starting year of the analysis. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Start_year. Starting year of the analysis",
            defaultValue = "1800",
            title = "Starting year of the analysis",
            identifier = "Start_year",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setStart_year(String value) {
        inputs.put("Start_year", value);
    }

    /** Stores the ending year of the analysis. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: End_year. Ending year of the analysis",
            defaultValue = "2020",
            title = "Ending year of the analysis",
            identifier = "End_year",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setEnd_year(String value) {
        inputs.put("End_year", value);
    }

    /** Exposes any result the computation produced that is not declared above. */
    @ComplexDataOutput(
            abstrakt = "Output that is not predetermined",
            title = "NonDeterministicOutput",
            identifier = "non_deterministic_output",
            binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** Delegates execution to the generic ecological-engine mapper. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,24 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS mapper for the OBIS_MOST_OBSERVED_TAXA transducer: per its declared
 * contract, it produces a bar chart of the most observed taxa within a years
 * range, based on the OBIS database.
 */
@Algorithm(
        statusSupported = true,
        title = "OBIS_MOST_OBSERVED_TAXA",
        abstrakt = "An algorithm producing a bar chart for the most observed taxa in a certain years range (with respect to the OBIS database)",
        identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA",
        version = "1.1.0")
public class OBIS_MOST_OBSERVED_TAXA extends AbstractEcologicalEngineMapper implements ITransducer {

    /** Stores the number of taxa to report. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Taxa_number. Number of taxa to report",
            defaultValue = "10",
            title = "Number of taxa to report",
            identifier = "Taxa_number",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setTaxa_number(String value) {
        inputs.put("Taxa_number", value);
    }

    /** Stores the chosen taxonomy level (GENUS, FAMILY, ORDER or CLASS). */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Level. Choose the taxonomy level",
            allowedValues = {"GENUS", "FAMILY", "ORDER", "CLASS"},
            defaultValue = "GENUS",
            title = "Choose the taxonomy level",
            identifier = "Level",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setLevel(String value) {
        inputs.put("Level", value);
    }

    /** Stores the starting year of the analysis. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Start_year. Starting year of the analysis",
            defaultValue = "1800",
            title = "Starting year of the analysis",
            identifier = "Start_year",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setStart_year(String value) {
        inputs.put("Start_year", value);
    }

    /** Stores the ending year of the analysis. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: End_year. Ending year of the analysis",
            defaultValue = "2020",
            title = "Ending year of the analysis",
            identifier = "End_year",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setEnd_year(String value) {
        inputs.put("End_year", value);
    }

    /** Exposes any result the computation produced that is not declared above. */
    @ComplexDataOutput(
            abstrakt = "Output that is not predetermined",
            title = "NonDeterministicOutput",
            identifier = "non_deterministic_output",
            binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** Delegates execution to the generic ecological-engine mapper. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,24 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS mapper for the OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA transducer:
 * per its declared contract, it produces a bar chart of one species'
 * distribution across a marine area type (LME or MEOW).
 */
@Algorithm(
        statusSupported = true,
        title = "OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA",
        abstrakt = "An algorithm producing a bar chart for the distribution of a species along a certain type of marine area (e.g. LME or MEOW)",
        identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA",
        version = "1.1.0")
public class OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA extends AbstractEcologicalEngineMapper implements ITransducer {

    /** Stores the species name to analyze. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Species. The species to analyze",
            defaultValue = "",
            title = "The species to analyze",
            identifier = "Species",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setSpecies(String value) {
        inputs.put("Species", value);
    }

    /** Stores the chosen area type (LME or MEOW). */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Area. Choose the area type",
            allowedValues = {"LME", "MEOW"},
            defaultValue = "LME",
            title = "Choose the area type",
            identifier = "Area",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setArea(String value) {
        inputs.put("Area", value);
    }

    /** Stores the starting year of the analysis. */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: Start_year. Starting year of the analysis",
            defaultValue = "1800",
            title = "Starting year of the analysis",
            identifier = "Start_year",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setStart_year(String value) {
        inputs.put("Start_year", value);
    }

    /** Stores the ending year of the analysis. (Leading space in the original title is preserved.) */
    @LiteralDataInput(
            abstrakt = "Name of the parameter: End_year. Ending year of the analysis",
            defaultValue = "2020",
            title = " Ending year of the analysis",
            identifier = "End_year",
            maxOccurs = 1, minOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setEnd_year(String value) {
        inputs.put("End_year", value);
    }

    /** Exposes any result the computation produced that is not declared above. */
    @ComplexDataOutput(
            abstrakt = "Output that is not predetermined",
            title = "NonDeterministicOutput",
            identifier = "non_deterministic_output",
            binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** Delegates execution to the generic ecological-engine mapper. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,24 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA transducer.
 * Annotated setters stash their values in the {@code inputs} map inherited from
 * {@link AbstractEcologicalEngineMapper}; the output getter reads the {@code outputs}
 * map populated by {@link #run()}.
 */
@Algorithm(statusSupported = true,
		title = "OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA",
		abstrakt = "Algorithm returning most observed species in a specific years range (data collected from OBIS database).",
		identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA",
		version = "1.1.0")
public class OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA extends AbstractEcologicalEngineMapper implements ITransducer {

	/** LME area name, constrained to the fixed list of Large Marine Ecosystems. */
	@LiteralDataInput(abstrakt = "Name of the parameter: Area_type. Choose the area name",
			allowedValues = {"AGULHAS CURRENT", "ANTARCTICA", "ARABIAN SEA", "BALTIC SEA", "BARENTS SEA", "BAY OF BENGAL", "BEAUFORT SEA", "BENGUELA CURRENT", "BLACK SEA", "CALIFORNIA CURRENT", "CANARY CURRENT", "CARIBBEAN SEA", "CELTIC-BISCAY SHELF", "CHUKCHI SEA", "EAST BERING SEA", "EAST BRAZIL SHELF", "EAST CENTRAL AUSTRALIAN SHELF", "EAST CHINA SEA", "EAST GREENLAND SHELF", "EAST SIBERIAN SEA", "FAROE PLATEAU", "GUINEA CURRENT", "GULF OF ALASKA", "GULF OF CALIFORNIA", "GULF OF MEXICO", "GULF OF THAILAND", "HUDSON BAY", "HUMBOLDT CURRENT", "IBERIAN COASTAL", "ICELAND SHELF", "INDONESIAN SEA", "INSULAR PACIFIC-HAWAIIAN", "KARA SEA", "KUROSHIO CURRENT", "LAPTEV SEA", "MEDITERRANEAN SEA", "NEWFOUNDLAND-LABRADOR SHELF", "NEW ZEALAND SHELF", "NORTH AUSTRALIAN SHELF", "NORTH BRAZIL SHELF", "NORTHEAST AUSTRALIAN SHELF", "NORTHEAST U.S. CONTINENTAL SHELF", "NORTH SEA", "NORTHWEST AUSTRALIAN SHELF", "NORWEGIAN SEA", "OYASHIO CURRENT", "PACIFIC CENTRAL-AMERICAN COASTAL", "PATAGONIAN SHELF", "RED SEA", "SCOTIAN SHELF", "SEA OF JAPAN", "SEA OF OKHOTSK", "SOMALI COASTAL CURRENT", "SOUTH BRAZIL SHELF", "SOUTH CHINA SEA", "SOUTHEAST AUSTRALIAN SHELF", "SOUTHEAST U.S. CONTINENTAL SHELF", "SOUTHWEST AUSTRALIAN SHELF", "SULU-CELEBES SEA", "WEST BERING SEA", "WEST CENTRAL AUSTRALIAN SHELF", "WEST GREENLAND SHELF", "YELLOW SEA"},
			defaultValue = "AGULHAS CURRENT", title = "Choose the area name", identifier = "Area_type", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setArea_type(String data) {
		inputs.put("Area_type", data);
	}

	/** First year of the analysed range. */
	@LiteralDataInput(abstrakt = "Name of the parameter: Start_year. Starting year of the analysis", defaultValue = "1800", title = "Starting year of the analysis", identifier = "Start_year", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setStart_year(String data) {
		inputs.put("Start_year", data);
	}

	/** Last year of the analysed range. */
	@LiteralDataInput(abstrakt = "Name of the parameter: End_year. Ending year of the analysis", defaultValue = "2020", title = "Ending year of the analysis", identifier = "End_year", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setEnd_year(String data) {
		inputs.put("End_year", data);
	}

	/** Pipe-separated species list; note the identifier contains a space ("Selected species"). */
	@LiteralDataInput(abstrakt = "Name of the parameter: Selected species. List of the species to analyze [a sequence of values separated by | ] (format: String)", defaultValue = "", title = "List of the species to analyze [a sequence of values separated by | ] (format: String)", identifier = "Selected species", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setSelected_species(String data) {
		inputs.put("Selected species", data);
	}

	/** XML payload produced at runtime; null until run() completes. */
	@ComplexDataOutput(abstrakt = "Output that is not predetermined", title = "NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates to the generic mapper. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,23 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the OBIS_SPECIES_OBSERVATIONS_PER_YEAR transducer.
 * Annotated setters feed the {@code inputs} map inherited from
 * {@link AbstractEcologicalEngineMapper}; the getter reads {@code outputs}
 * once {@link #run()} has delegated execution to the superclass.
 */
@Algorithm(statusSupported = true,
		title = "OBIS_SPECIES_OBSERVATIONS_PER_YEAR",
		abstrakt = "An algorithm producing the trend of the observations for a certain species in a certain years range.",
		identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR",
		version = "1.1.0")
public class OBIS_SPECIES_OBSERVATIONS_PER_YEAR extends AbstractEcologicalEngineMapper implements ITransducer {

	/** First year of the analysed range. */
	@LiteralDataInput(abstrakt = "Name of the parameter: Start_year. Starting year of the analysis", defaultValue = "1800", title = "Starting year of the analysis", identifier = "Start_year", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setStart_year(String data) {
		inputs.put("Start_year", data);
	}

	/** Last year of the analysed range. */
	@LiteralDataInput(abstrakt = "Name of the parameter: End_year. Ending year of the analysis", defaultValue = "2020", title = "Ending year of the analysis", identifier = "End_year", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setEnd_year(String data) {
		inputs.put("End_year", data);
	}

	/** Pipe-separated species list; the identifier deliberately contains a space. */
	@LiteralDataInput(abstrakt = "Name of the parameter: Selected species. List of the species to analyze [a sequence of values separated by | ] (format: String)", defaultValue = "", title = "List of the species to analyze [a sequence of values separated by | ] (format: String)", identifier = "Selected species", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setSelected_species(String data) {
		inputs.put("Selected species", data);
	}

	/** XML payload produced at runtime; null until run() completes. */
	@ComplexDataOutput(abstrakt = "Output that is not predetermined", title = "NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates to the generic mapper. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,24 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the OBIS_TAXA_OBSERVATIONS_PER_YEAR transducer.
 * Annotated setters feed the {@code inputs} map inherited from
 * {@link AbstractEcologicalEngineMapper}; the getter reads {@code outputs}
 * once {@link #run()} has delegated execution to the superclass.
 */
@Algorithm(statusSupported = true,
		title = "OBIS_TAXA_OBSERVATIONS_PER_YEAR",
		abstrakt = "Algorithm returning most observations taxonomy trend in a specific years range (with respect to the OBIS database)",
		identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR",
		version = "1.1.0")
public class OBIS_TAXA_OBSERVATIONS_PER_YEAR extends AbstractEcologicalEngineMapper implements ITransducer {

	/** Taxonomic rank to aggregate on, restricted to the four listed levels. */
	@LiteralDataInput(abstrakt = "Name of the parameter: Level. Choose the taxonomy level", allowedValues = {"GENUS", "FAMILY", "ORDER", "CLASS"}, defaultValue = "GENUS", title = "Choose the taxonomy level", identifier = "Level", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setLevel(String data) {
		inputs.put("Level", data);
	}

	/** First year of the analysed range. */
	@LiteralDataInput(abstrakt = "Name of the parameter: Start_year. Starting year of the analysis", defaultValue = "1800", title = "Starting year of the analysis", identifier = "Start_year", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setStart_year(String data) {
		inputs.put("Start_year", data);
	}

	/** Last year of the analysed range. */
	@LiteralDataInput(abstrakt = "Name of the parameter: End_year. Ending year of the analysis", defaultValue = "2020", title = "Ending year of the analysis", identifier = "End_year", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setEnd_year(String data) {
		inputs.put("End_year", data);
	}

	/** Pipe-separated taxa list; the identifier deliberately contains a space. */
	@LiteralDataInput(abstrakt = "Name of the parameter: Selected taxonomy. List of taxa to analyze [a sequence of values separated by | ] (format: String)", defaultValue = "", title = "List of taxa to analyze [a sequence of values separated by | ] (format: String)", identifier = "Selected taxonomy", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setSelected_taxonomy(String data) {
		inputs.put("Selected taxonomy", data);
	}

	/** XML payload produced at runtime; null until run() completes. */
	@ComplexDataOutput(abstrakt = "Output that is not predetermined", title = "NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates to the generic mapper. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,31 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the OCCURRENCE_ENRICHMENT transducer: joins an occurrence-point
 * table with a list of environmental layers and returns an enriched table.
 * Annotated setters feed the {@code inputs} map inherited from
 * {@link AbstractEcologicalEngineMapper}; output getters read the {@code outputs} map
 * populated by {@link #run()}.
 *
 * Fix: in {@link #getOutputTable1()} the {@code URL} local was declared and
 * null-initialized outside the try block for no reason; its scope is narrowed to the
 * try. The failure contract (print the stack trace and return {@code null}) is kept,
 * since WPS clients may rely on a missing output rather than a thrown exception.
 */
@Algorithm(statusSupported = true,
		title = "OCCURRENCE_ENRICHMENT",
		abstrakt = "An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points.",
		identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT",
		version = "1.1.0")
public class OCCURRENCE_ENRICHMENT extends AbstractEcologicalEngineMapper implements ITransducer {

	/** Occurrence records table (Species Products Discovery template). */
	@ComplexDataInput(abstrakt = "Name of the parameter: OccurrenceTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5]", title = "A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5]", maxOccurs = 1, minOccurs = 1, identifier = "OccurrenceTable", binding = GenericFileDataBinding.class)
	public void setOccurrenceTable(GenericFileData file) {
		inputs.put("OccurrenceTable", file);
	}

	/** Name of the longitude column in OccurrenceTable. */
	@LiteralDataInput(abstrakt = "Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrenceTable]", defaultValue = "decimallongitude", title = "The column containing longitude values [the name of a column from OccurrenceTable]", identifier = "LongitudeColumn", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setLongitudeColumn(String data) {
		inputs.put("LongitudeColumn", data);
	}

	/** Name of the latitude column in OccurrenceTable. */
	@LiteralDataInput(abstrakt = "Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrenceTable]", defaultValue = "decimallatitude", title = "The column containing latitude values [the name of a column from OccurrenceTable]", identifier = "LatitudeColumn", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setLatitudeColumn(String data) {
		inputs.put("LatitudeColumn", data);
	}

	/** Name of the scientific-name column in OccurrenceTable. */
	@LiteralDataInput(abstrakt = "Name of the parameter: ScientificNameColumn. The column containing Scientific Names [the name of a column from OccurrenceTable]", defaultValue = "scientificname", title = "The column containing Scientific Names [the name of a column from OccurrenceTable]", identifier = "ScientificNameColumn", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setScientificNameColumn(String data) {
		inputs.put("ScientificNameColumn", data);
	}

	/** Name of the time column in OccurrenceTable. */
	@LiteralDataInput(abstrakt = "Name of the parameter: TimeColumn. The column containing time information [the name of a column from OccurrenceTable]", defaultValue = "eventdate", title = "The column containing time information [the name of a column from OccurrenceTable]", identifier = "TimeColumn", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setTimeColumn(String data) {
		inputs.put("TimeColumn", data);
	}

	/** Optional row filter; the single-space default means "no filter". */
	@LiteralDataInput(abstrakt = "Name of the parameter: OptionalFilter. A filter on one of the columns (e.g. basisofrecord='HumanObservation'). Optional", defaultValue = " ", title = "A filter on one of the columns (e.g. basisofrecord='HumanObservation'). Optional", identifier = "OptionalFilter", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setOptionalFilter(String data) {
		inputs.put("OptionalFilter", data);
	}

	/** Spatial resolution in degrees; stored as its string form, matching the mapper's map of strings. */
	@LiteralDataInput(abstrakt = "Name of the parameter: Resolution. The spatial resolution in degrees of the association between observations and environmental features", defaultValue = "0.5", title = "The spatial resolution in degrees of the association between observations and environmental features", identifier = "Resolution", maxOccurs = 1, minOccurs = 1, binding = LiteralDoubleBinding.class)
	public void setResolution(Double data) {
		inputs.put("Resolution", "" + data);
	}

	/** Name of the table to create. */
	@LiteralDataInput(abstrakt = "Name of the parameter: OutputTableName. The name of the output table", defaultValue = "enrich_", title = "The name of the output table", identifier = "OutputTableName", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setOutputTableName(String data) {
		inputs.put("OutputTableName", data);
	}

	/** Pipe-separated list of environmental layers (titles, UUIDs or HTTP links). */
	@LiteralDataInput(abstrakt = "Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ) [a sequence of values separated by | ] (format: String)", defaultValue = "", title = "The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ) [a sequence of values separated by | ] (format: String)", identifier = "Layers", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setLayers(String data) {
		inputs.put("Layers", data);
	}

	/** Pipe-separated column names for the enriched features in the result table. */
	@LiteralDataInput(abstrakt = "Name of the parameter: FeaturesNames. The list of names for the columns corresponding to the environmental layers. These will be the column names of the resulting table [a sequence of values separated by | ] (format: String)", defaultValue = "", title = "The list of names for the columns corresponding to the environmental layers. These will be the column names of the resulting table [a sequence of values separated by | ] (format: String)", identifier = "FeaturesNames", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setFeaturesNames(String data) {
		inputs.put("FeaturesNames", data);
	}

	/**
	 * Streams the enriched CSV table back to the WPS client. The mapper stores the
	 * result as a URL string under the "OutputTable1" key.
	 *
	 * @return the CSV wrapped as GenericFileData, or null if the output is missing
	 *         or unreachable (failure is reported on stderr, preserving the original
	 *         contract of this generated wrapper)
	 */
	@ComplexDataOutput(abstrakt = "Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title = "Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", identifier = "OutputTable1", binding = CsvFileDataBinding.class)
	public GenericFileData getOutputTable1() {
		try {
			// Scope narrowed: the URL is only meaningful inside the try.
			URL url = new URL((String) outputs.get("OutputTable1"));
			return new GenericFileData(url.openStream(), "text/csv");
		} catch (Exception e) {
			e.printStackTrace();
			return null;
		}
	}

	/** XML payload produced at runtime; null until run() completes. */
	@ComplexDataOutput(abstrakt = "Output that is not predetermined", title = "NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates to the generic mapper. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,24 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the PRESENCE_CELLS_GENERATION transducer: builds an HCAF
 * table of cells containing presence points for a species.
 * Annotated setters feed the {@code inputs} map inherited from
 * {@link AbstractEcologicalEngineMapper}; output getters read the {@code outputs}
 * map populated by {@link #run()}.
 *
 * Fix: in {@link #getOutputTable()} the {@code URL} local was declared and
 * null-initialized outside the try block for no reason; its scope is narrowed to
 * the try. The failure contract (print the stack trace and return {@code null})
 * is preserved.
 */
@Algorithm(statusSupported = true,
		title = "PRESENCE_CELLS_GENERATION",
		abstrakt = "An algorithm producing cells and features (HCAF) for a species containing presence points",
		identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION",
		version = "1.1.0")
public class PRESENCE_CELLS_GENERATION extends AbstractEcologicalEngineMapper implements ITransducer {

	/** Label for the filtered HCAF table to create. */
	@LiteralDataInput(abstrakt = "Name of the parameter: Table_Label. the name of the Filtered Hcaf", defaultValue = "PresenceCells_", title = "the name of the Filtered Hcaf", identifier = "Table_Label", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setTable_Label(String data) {
		inputs.put("Table_Label", data);
	}

	/** Maximum number of points to use; -1 means "all". Stored as its string form. */
	@LiteralDataInput(abstrakt = "Name of the parameter: Number_of_Points. Maximum number of points to take (-1 to take all)", defaultValue = "-1", title = "Maximum number of points to take (-1 to take all)", identifier = "Number_of_Points", maxOccurs = 1, minOccurs = 1, binding = LiteralIntBinding.class)
	public void setNumber_of_Points(Integer data) {
		inputs.put("Number_of_Points", "" + data);
	}

	/** Species code following the Fish-Base conventions. */
	@LiteralDataInput(abstrakt = "Name of the parameter: Species_Code. the species code according to the Fish-Base conventions", defaultValue = "Fis-30189", title = "the species code according to the Fish-Base conventions", identifier = "Species_Code", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setSpecies_Code(String data) {
		inputs.put("Species_Code", data);
	}

	/**
	 * Streams the produced HCAF CSV table back to the WPS client. The mapper stores
	 * the result as a URL string under the "OutputTable" key.
	 *
	 * @return the CSV wrapped as GenericFileData, or null if the output is missing
	 *         or unreachable (failure reported on stderr, preserving the original
	 *         contract of this generated wrapper)
	 */
	@ComplexDataOutput(abstrakt = "Name of the parameter: OutputTable. a HCAF table containing Presence Points cells [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM]", title = "a HCAF table containing Presence Points cells [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM]", identifier = "OutputTable", binding = CsvFileDataBinding.class)
	public GenericFileData getOutputTable() {
		try {
			// Scope narrowed: the URL is only meaningful inside the try.
			URL url = new URL((String) outputs.get("OutputTable"));
			return new GenericFileData(url.openStream(), "text/csv");
		} catch (Exception e) {
			e.printStackTrace();
			return null;
		}
	}

	/** XML payload produced at runtime; null until run() completes. */
	@ComplexDataOutput(abstrakt = "Output that is not predetermined", title = "NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates to the generic mapper. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,28 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS wrapper exposing the RASTER_DATA_PUBLISHER transducer: publishes a raster
 * file as a map or dataset in the e-Infrastructure.
 * Annotated setters feed the {@code inputs} map inherited from
 * {@link AbstractEcologicalEngineMapper}; the getter reads the {@code outputs}
 * map populated by {@link #run()}.
 */
@Algorithm(statusSupported = true,
		title = "RASTER_DATA_PUBLISHER",
		abstrakt = "This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants.",
		identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER",
		version = "1.1.0")
public class RASTER_DATA_PUBLISHER extends AbstractEcologicalEngineMapper implements ITransducer {

	/** Title shown on GeoExplorer for the published dataset. */
	@LiteralDataInput(abstrakt = "Name of the parameter: DatasetTitle. Title of the geospatial dataset to be shown on GeoExplorer", defaultValue = "Generic Raster Layer", title = "Title of the geospatial dataset to be shown on GeoExplorer", identifier = "DatasetTitle", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setDatasetTitle(String data) {
		inputs.put("DatasetTitle", data);
	}

	/** Abstract describing content, references and usage policies. */
	@LiteralDataInput(abstrakt = "Name of the parameter: DatasetAbstract. Abstract defining the content, the references and usage policies", defaultValue = "Abstract", title = "Abstract defining the content, the references and usage policies", identifier = "DatasetAbstract", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setDatasetAbstract(String data) {
		inputs.put("DatasetAbstract", data);
	}

	/** Inner layer/band to publish as a map (ignored for non-NetCDF files). */
	@LiteralDataInput(abstrakt = "Name of the parameter: InnerLayerName. Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files)", defaultValue = "band_1", title = "Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files)", identifier = "InnerLayerName", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setInnerLayerName(String data) {
		inputs.put("InnerLayerName", data);
	}

	/** Target file name inside the infrastructure. */
	@LiteralDataInput(abstrakt = "Name of the parameter: FileNameOnInfra. Name of the file that will be created in the infrastructures", defaultValue = "raster-1458666673377.nc", title = "Name of the file that will be created in the infrastructures", identifier = "FileNameOnInfra", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setFileNameOnInfra(String data) {
		inputs.put("FileNameOnInfra", data);
	}

	/** Raster dataset to process, supplied as a complex input. */
	@ComplexDataInput(abstrakt = "Name of the parameter: RasterFile. Raster dataset to process", title = "Raster dataset to process", maxOccurs = 1, minOccurs = 1, identifier = "RasterFile", binding = D4ScienceDataInputBinding.class)
	public void setRasterFile(GenericFileData file) {
		inputs.put("RasterFile", file);
	}

	/** Pipe-separated topics attached to the published dataset. */
	@LiteralDataInput(abstrakt = "Name of the parameter: Topics. Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String)", defaultValue = "", title = "Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String)", identifier = "Topics", maxOccurs = 1, minOccurs = 1, binding = LiteralStringBinding.class)
	public void setTopics(String data) {
		inputs.put("Topics", data);
	}

	/** Layer resolution; -1 lets NetCDF inputs be auto-estimated. Stored as its string form. */
	@LiteralDataInput(abstrakt = "Name of the parameter: SpatialResolution. The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1)", defaultValue = "-1d", title = "The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1)", identifier = "SpatialResolution", maxOccurs = 1, minOccurs = 1, binding = LiteralDoubleBinding.class)
	public void setSpatialResolution(Double data) {
		inputs.put("SpatialResolution", "" + data);
	}

	/** XML payload produced at runtime; null until run() completes. */
	@ComplexDataOutput(abstrakt = "Output that is not predetermined", title = "NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
	public XmlObject getNon_deterministic_output() {
		return (XmlObject) outputs.get("non_deterministic_output");
	}

	/** WPS entry point: delegates to the generic mapper. */
	@Execute
	public void run() throws Exception {
		super.run();
	}
}

View File

@ -1,33 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
// WPS descriptor for the SGVM VMS-track interpolation transducer.
// Generated-style mapping class: each annotated setter below copies one WPS input
// into the inherited `inputs` map (literals are stringified with ""+data), and the
// output getter reads back from the inherited `outputs` map. The actual computation
// happens in AbstractEcologicalEngineMapper.run(); the setters/getters are
// presumably invoked reflectively by the 52North WPS framework via the
// annotations — TODO confirm against the framework version in use.
@Algorithm(statusSupported=true, title="SGVM_INTERPOLATION", abstrakt="An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION", version = "1.1.0")
public class SGVM_INTERPOLATION extends AbstractEcologicalEngineMapper implements ITransducer{
// TACSAT-format input file; stored as a GenericFileData object (not stringified).
@ComplexDataInput(abstrakt="Name of the parameter: InputFile. Input file in TACSAT format. E.g. http://goo.gl/i16kPw", title="Input file in TACSAT format. E.g. http://goo.gl/i16kPw", maxOccurs=1, minOccurs=1, identifier = "InputFile", binding = D4ScienceDataInputBinding.class) public void setInputFile(GenericFileData file) {inputs.put("InputFile",file);}
// Interpolation density: simulated pings between each pair of real positions.
@LiteralDataInput(abstrakt="Name of the parameter: npoints. The number of pings or positions required between each real or actual vessel position or ping", defaultValue="10", title="The number of pings or positions required between each real or actual vessel position or ping", identifier = "npoints", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setnpoints(Integer data) {inputs.put("npoints",""+data);}
// Temporal parameters: expected spacing (minutes) and tolerance used to pair points.
@LiteralDataInput(abstrakt="Name of the parameter: interval. Average time in minutes between two adjacent datapoints", defaultValue="120", title="Average time in minutes between two adjacent datapoints", identifier = "interval", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setinterval(Integer data) {inputs.put("interval",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: margin. Maximum deviation from specified interval to find adjacent datapoints (tolerance)", defaultValue="10", title="Maximum deviation from specified interval to find adjacent datapoints (tolerance)", identifier = "margin", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setmargin(Integer data) {inputs.put("margin",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: res. Number of points to use to create interpolation (including start and end point)", defaultValue="100", title="Number of points to use to create interpolation (including start and end point)", identifier = "res", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setres(Integer data) {inputs.put("res",""+data);}
// Interpolation method switch: cubic Hermite spline ("cHs") or straight line ("SL").
@LiteralDataInput(abstrakt="Name of the parameter: method. Set to cHs for cubic Hermite spline or SL for Straight Line interpolation", allowedValues= {"cHs","SL"}, defaultValue="cHs", title="Set to cHs for cubic Hermite spline or SL for Straight Line interpolation", identifier = "method", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setmethod(String data) {inputs.put("method",data);}
// Cubic-spline tuning parameters (only meaningful when method=cHs).
@LiteralDataInput(abstrakt="Name of the parameter: fm. The FM parameter in cubic interpolation", defaultValue="0.5", title="The FM parameter in cubic interpolation", identifier = "fm", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setfm(Double data) {inputs.put("fm",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: distscale. The DistScale parameter for cubic interpolation", defaultValue="20", title="The DistScale parameter for cubic interpolation", identifier = "distscale", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setdistscale(Integer data) {inputs.put("distscale",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: sigline. The Sigline parameter in cubic interpolation", defaultValue="0.2", title="The Sigline parameter in cubic interpolation", identifier = "sigline", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setsigline(Double data) {inputs.put("sigline",""+data);}
// Speed window filter: points outside [minspeedThr, maxspeedThr] are excluded.
@LiteralDataInput(abstrakt="Name of the parameter: minspeedThr. A filter on the minimum speed to take into account for interpolation", defaultValue="2", title="A filter on the minimum speed to take into account for interpolation", identifier = "minspeedThr", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setminspeedThr(Double data) {inputs.put("minspeedThr",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: maxspeedThr. A filter on the maximum speed to take into account for interpolation", defaultValue="6", title="A filter on the maximum speed to take into account for interpolation", identifier = "maxspeedThr", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setmaxspeedThr(Double data) {inputs.put("maxspeedThr",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: headingAdjustment. Parameter to adjust the choice of heading depending on its own or previous point (0 or 1). Set 1 in case the heading at the endpoint does not represent the heading of the arriving vessel to that point but the departing vessel.", defaultValue="0", title="Parameter to adjust the choice of heading depending on its own or previous point (0 or 1). Set 1 in case the heading at the endpoint does not represent the heading of the arriving vessel to that point but the departing vessel.", identifier = "headingAdjustment", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setheadingAdjustment(Integer data) {inputs.put("headingAdjustment",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: equalDist. Whether the number of positions returned should be equally spaced or not", defaultValue="true", allowedValues= {"true","false"}, title="Whether the number of positions returned should be equally spaced or not", identifier = "equalDist", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void setequalDist(Boolean data) {inputs.put("equalDist",""+data);}
// Catch-all XML output produced by the engine; populated by super.run().
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
// Entry point invoked by the WPS framework; all work is delegated to the superclass.
@Execute public void run() throws Exception { super.run(); } }

View File

@ -1,27 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS descriptor for the SUBMITQUERY transducer: submits a database query
 * through the Statistical Manager engine.
 *
 * <p>Each annotated setter stores one process input into the inherited
 * {@code inputs} map (booleans are stringified with {@code "" + data}); the
 * output getter reads the result back from the inherited {@code outputs} map.
 * All actual work is performed by {@link AbstractEcologicalEngineMapper#run()}.
 */
@Algorithm(
        identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY",
        title = "SUBMITQUERY",
        abstrakt = "Algorithm that allows to submit a query",
        version = "1.1.0",
        statusSupported = true)
public class SUBMITQUERY extends AbstractEcologicalEngineMapper implements ITransducer {

    /** Name of the database resource registered in the infrastructure. */
    @LiteralDataInput(
            identifier = "ResourceName",
            title = "The name of the resource",
            abstrakt = "Name of the parameter: ResourceName. The name of the resource",
            defaultValue = "",
            minOccurs = 1,
            maxOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setResourceName(String data) {
        inputs.put("ResourceName", data);
    }

    /** Name of the database to query inside the selected resource. */
    @LiteralDataInput(
            identifier = "DatabaseName",
            title = "The name of the database",
            abstrakt = "Name of the parameter: DatabaseName. The name of the database",
            defaultValue = "",
            minOccurs = 1,
            maxOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setDatabaseName(String data) {
        inputs.put("DatabaseName", data);
    }

    /** Whether the query must be executed in read-only mode (stored as "true"/"false"). */
    @LiteralDataInput(
            identifier = "Read-Only Query",
            title = "Check the box if the query must be read-only",
            abstrakt = "Name of the parameter: Read-Only Query. Check the box if the query must be read-only",
            allowedValues = {"true", "false"},
            defaultValue = "true",
            minOccurs = 1,
            maxOccurs = 1,
            binding = LiteralBooleanBinding.class)
    public void setRead_Only_Query(Boolean data) {
        inputs.put("Read-Only Query", "" + data);
    }

    /** Whether smart query correction should be applied (stored as "true"/"false"). */
    @LiteralDataInput(
            identifier = "Apply Smart Correction",
            title = "Check the box for smart correction",
            abstrakt = "Name of the parameter: Apply Smart Correction. Check the box for smart correction",
            allowedValues = {"true", "false"},
            defaultValue = "true",
            minOccurs = 1,
            maxOccurs = 1,
            binding = LiteralBooleanBinding.class)
    public void setApply_Smart_Correction(Boolean data) {
        inputs.put("Apply Smart Correction", "" + data);
    }

    /** SQL dialect of the submitted query. */
    @LiteralDataInput(
            identifier = "Language",
            title = "Language",
            abstrakt = "Name of the parameter: Language. Language",
            allowedValues = {"NONE", "POSTGRES", "MYSQL"},
            defaultValue = "NONE",
            minOccurs = 1,
            maxOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setLanguage(String data) {
        inputs.put("Language", data);
    }

    /** The query text to submit. */
    @LiteralDataInput(
            identifier = "Query",
            title = "query",
            abstrakt = "Name of the parameter: Query. query",
            defaultValue = "",
            minOccurs = 1,
            maxOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setQuery(String data) {
        inputs.put("Query", data);
    }

    /** Catch-all XML output produced by the engine during {@code run()}. */
    @ComplexDataOutput(
            identifier = "non_deterministic_output",
            title = "NonDeterministicOutput",
            abstrakt = "Output that is not predetermined",
            binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** Entry point invoked by the WPS framework; delegates to the generic mapper. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,29 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
// WPS descriptor for the TIMEEXTRACTION transducer: extracts a time series of
// values from a geospatial layer at a given (X, Y, Z) point and resolution.
// Setters copy WPS inputs into the inherited `inputs` map; outputs are read from
// the inherited `outputs` map after super.run() completes.
// NOTE(review): the output annotation strings contain the typo "ecoding" (for
// "encoding"); left unchanged here because the text is part of the published
// process description — confirm before correcting.
@Algorithm(statusSupported=true, title="TIMEEXTRACTION", abstrakt="An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION", version = "1.1.0")
public class TIMEEXTRACTION extends AbstractEcologicalEngineMapper implements ITransducer{
// Source layer reference: GeoNetwork title/UUID or a direct HTTP link.
@LiteralDataInput(abstrakt="Name of the parameter: Layer. Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", defaultValue="", title="Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", identifier = "Layer", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayer(String data) {inputs.put("Layer",data);}
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="extr_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
// Extraction point coordinates; literal numbers are stringified into the inputs map.
@LiteralDataInput(abstrakt="Name of the parameter: X. X coordinate", defaultValue="0", title="X coordinate", identifier = "X", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setX(Double data) {inputs.put("X",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: Y. Y coordinate", defaultValue="0", title="Y coordinate", identifier = "Y", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setY(Double data) {inputs.put("Y",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: Z. Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", defaultValue="0", title="Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setZ(Double data) {inputs.put("Z",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: Resolution. Extraction point resolution", defaultValue="0.5", title="Extraction point resolution", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setResolution(Double data) {inputs.put("Resolution",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: SamplingFreq. Sampling frequency in Hz. Leave it to -1 if unknown or under 1", defaultValue="-1", title="Sampling frequency in Hz. Leave it to -1 if unknown or under 1", identifier = "SamplingFreq", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setSamplingFreq(Integer data) {inputs.put("SamplingFreq",""+data);}
// Fetches the produced CSV from the URL stored in outputs; on any failure it
// prints the stack trace and returns null (a deliberate best-effort contract —
// callers receive a missing output rather than a fault).
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
@LiteralDataOutput(abstrakt="Name of the parameter: Note. Note about the signal", title="Note about the signal", identifier = "Note", binding = LiteralStringBinding.class) public String getNote() {return (String) outputs.get("Note");}
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
// Entry point invoked by the WPS framework; all work is delegated to the superclass.
@Execute public void run() throws Exception { super.run(); } }

View File

@ -1,35 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
// WPS descriptor for the TIMEEXTRACTION_TABLE transducer: like TIMEEXTRACTION,
// but the time series is extracted from a user-supplied geospatial table rather
// than from an indexed layer. Setters copy WPS inputs into the inherited
// `inputs` map; outputs are read from the inherited `outputs` map after
// super.run() completes.
@Algorithm(statusSupported=true, title="TIMEEXTRACTION_TABLE", abstrakt="An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE", version = "1.1.0")
public class TIMEEXTRACTION_TABLE extends AbstractEcologicalEngineMapper implements ITransducer{
// Input CSV table; stored as a GenericFileData object (not stringified).
@ComplexDataInput(abstrakt="Name of the parameter: geoReferencedTableName. A geospatial table containing at least x,y information [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="A geospatial table containing at least x,y information [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "geoReferencedTableName", binding = GenericFileDataBinding.class) public void setgeoReferencedTableName(GenericFileData file) {inputs.put("geoReferencedTableName",file);}
// Column-mapping parameters: which table columns hold x, y, time, and values.
@LiteralDataInput(abstrakt="Name of the parameter: xColumn. The column containing x (longitude) information [the name of a column from geoReferencedTableName]", defaultValue="x", title="The column containing x (longitude) information [the name of a column from geoReferencedTableName]", identifier = "xColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setxColumn(String data) {inputs.put("xColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: yColumn. The column containing y (latitude) information [the name of a column from geoReferencedTableName]", defaultValue="y", title="The column containing y (latitude) information [the name of a column from geoReferencedTableName]", identifier = "yColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setyColumn(String data) {inputs.put("yColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: timeColumn. The column containing time information [the name of a column from geoReferencedTableName]", defaultValue="datetime", title="The column containing time information [the name of a column from geoReferencedTableName]", identifier = "timeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void settimeColumn(String data) {inputs.put("timeColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: valueColumn. A column containing real valued features [the name of a column from geoReferencedTableName]", defaultValue="value", title="A column containing real valued features [the name of a column from geoReferencedTableName]", identifier = "valueColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setvalueColumn(String data) {inputs.put("valueColumn",data);}
// Optional row filter expression; default is a single space (i.e. no filtering).
@LiteralDataInput(abstrakt="Name of the parameter: filter. A filter on one of the columns (e.g. speed=2)", defaultValue=" ", title="A filter on one of the columns (e.g. speed=2)", identifier = "filter", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setfilter(String data) {inputs.put("filter",data);}
@LiteralDataInput(abstrakt="Name of the parameter: zColumn. The column containing z (altitude or depth) information (optional)", defaultValue="z", title="The column containing z (altitude or depth) information (optional)", identifier = "zColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setzColumn(String data) {inputs.put("zColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="extr_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
// Extraction point coordinates; literal numbers are stringified into the inputs map.
@LiteralDataInput(abstrakt="Name of the parameter: X. X coordinate", defaultValue="0", title="X coordinate", identifier = "X", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setX(Double data) {inputs.put("X",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: Y. Y coordinate", defaultValue="0", title="Y coordinate", identifier = "Y", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setY(Double data) {inputs.put("Y",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: Z. Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", defaultValue="0", title="Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setZ(Double data) {inputs.put("Z",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: Resolution. Extraction point resolution", defaultValue="0.5", title="Extraction point resolution", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setResolution(Double data) {inputs.put("Resolution",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: SamplingFreq. Sampling frequency in Hz. Leave it to -1 if unknown or under 1", defaultValue="-1", title="Sampling frequency in Hz. Leave it to -1 if unknown or under 1", identifier = "SamplingFreq", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setSamplingFreq(Integer data) {inputs.put("SamplingFreq",""+data);}
// Fetches the produced CSV from the URL stored in outputs; on any failure it
// prints the stack trace and returns null (deliberate best-effort contract).
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
@LiteralDataOutput(abstrakt="Name of the parameter: Note. Note about the signal", title="Note about the signal", identifier = "Note", binding = LiteralStringBinding.class) public String getNote() {return (String) outputs.get("Note");}
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
// Entry point invoked by the WPS framework; all work is delegated to the superclass.
@Execute public void run() throws Exception { super.run(); } }

View File

@ -1,26 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS descriptor for the TIME_GEO_CHART transducer: produces an animated GIF
 * that maps quantities to country colors over time.
 *
 * <p>Each annotated setter stores one process input into the inherited
 * {@code inputs} map; the output getter reads the result back from the
 * inherited {@code outputs} map. All actual work is performed by
 * {@link AbstractEcologicalEngineMapper#run()}.
 */
@Algorithm(
        identifier = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART",
        title = "TIME_GEO_CHART",
        abstrakt = "An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country.",
        version = "1.1.0",
        statusSupported = true)
public class TIME_GEO_CHART extends AbstractEcologicalEngineMapper implements ITransducer {

    /** The CSV input table; stored as a GenericFileData object (not stringified). */
    @ComplexDataInput(
            identifier = "InputTable",
            title = "The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]",
            abstrakt = "Name of the parameter: InputTable. The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]",
            minOccurs = 1,
            maxOccurs = 1,
            binding = GenericFileDataBinding.class)
    public void setInputTable(GenericFileData file) {
        inputs.put("InputTable", file);
    }

    /** Column of the input table holding decimal longitudes. */
    @LiteralDataInput(
            identifier = "Longitude",
            title = "The column containing longitude decimal values [the name of a column from InputTable]",
            abstrakt = "Name of the parameter: Longitude. The column containing longitude decimal values [the name of a column from InputTable]",
            defaultValue = "long",
            minOccurs = 1,
            maxOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setLongitude(String data) {
        inputs.put("Longitude", data);
    }

    /** Column of the input table holding decimal latitudes. */
    @LiteralDataInput(
            identifier = "Latitude",
            title = "The column containing latitude decimal values [the name of a column from InputTable]",
            abstrakt = "Name of the parameter: Latitude. The column containing latitude decimal values [the name of a column from InputTable]",
            defaultValue = "lat",
            minOccurs = 1,
            maxOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setLatitude(String data) {
        inputs.put("Latitude", data);
    }

    /** Pipe-separated list of numeric columns to visualize. */
    @LiteralDataInput(
            identifier = "Quantities",
            title = "The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]",
            abstrakt = "Name of the parameter: Quantities. The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]",
            defaultValue = "",
            minOccurs = 1,
            maxOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setQuantities(String data) {
        inputs.put("Quantities", data);
    }

    /** Column of the input table holding the time dimension. */
    @LiteralDataInput(
            identifier = "Time",
            title = "The column containing time information [the name of a column from InputTable]",
            abstrakt = "Name of the parameter: Time. The column containing time information [the name of a column from InputTable]",
            defaultValue = "year",
            minOccurs = 1,
            maxOccurs = 1,
            binding = LiteralStringBinding.class)
    public void setTime(String data) {
        inputs.put("Time", data);
    }

    /** Catch-all XML output produced by the engine during {@code run()}. */
    @ComplexDataOutput(
            identifier = "non_deterministic_output",
            title = "NonDeterministicOutput",
            abstrakt = "Output that is not predetermined",
            binding = GenericXMLDataBinding.class)
    public XmlObject getNon_deterministic_output() {
        return (XmlObject) outputs.get("non_deterministic_output");
    }

    /** Entry point invoked by the WPS framework; delegates to the generic mapper. */
    @Execute
    public void run() throws Exception {
        super.run();
    }
}

View File

@ -1,29 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
// WPS descriptor for the TIME_SERIES_ANALYSIS transducer: uniform resampling,
// FFT-based periodicity detection, and Caterpillar-SSA forecasting of a time
// series table. Setters copy WPS inputs into the inherited `inputs` map;
// outputs are read from the inherited `outputs` map after super.run() completes.
@Algorithm(statusSupported=true, title="TIME_SERIES_ANALYSIS", abstrakt="An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS", version = "1.1.0")
public class TIME_SERIES_ANALYSIS extends AbstractEcologicalEngineMapper implements ITransducer{
// Input time-series table; stored as a GenericFileData object (not stringified).
@ComplexDataInput(abstrakt="Name of the parameter: TimeSeriesTable. The table containing the time series [a http link to a table in UTF-8 encoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="The table containing the time series [a http link to a table in UTF-8 encoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", maxOccurs=1, minOccurs=1, identifier = "TimeSeriesTable", binding = GenericFileDataBinding.class) public void setTimeSeriesTable(GenericFileData file) {inputs.put("TimeSeriesTable",file);}
// NOTE(review): identifier is "ValueColum" (missing 'n') — kept as-is; it is the
// published parameter name and must match the engine's expectation.
@LiteralDataInput(abstrakt="Name of the parameter: ValueColum. The column containing the values of the time series [the name of a column from TimeSeriesTable]", defaultValue="values", title="The column containing the values of the time series [the name of a column from TimeSeriesTable]", identifier = "ValueColum", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setValueColum(String data) {inputs.put("ValueColum",data);}
// FFT parameters; literal numbers are stringified into the inputs map.
@LiteralDataInput(abstrakt="Name of the parameter: FFT_Window_Samples. The number of samples N on which the Fourier Transform (FFT) will be extracted. It should be a power of two and less than the signal length, otherwise it will be automatically recalculated. The FFT will be calculated every N/2 samples, taking N samples each time. The spectrogram will display the FFT on the slices of N samples.", defaultValue="12", title="The number of samples N on which the Fourier Transform (FFT) will be extracted. It should be a power of two and less than the signal length, otherwise it will be automatically recalculated. The FFT will be calculated every N/2 samples, taking N samples each time. The spectrogram will display the FFT on the slices of N samples.", identifier = "FFT_Window_Samples", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setFFT_Window_Samples(Integer data) {inputs.put("FFT_Window_Samples",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: AggregationFunction. Function to apply to samples with the same time instant", allowedValues= {"SUM","AVG"}, defaultValue="SUM", title="Function to apply to samples with the same time instant", identifier = "AggregationFunction", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setAggregationFunction(String data) {inputs.put("AggregationFunction",data);}
@LiteralDataInput(abstrakt="Name of the parameter: Sensitivity. Sensitivity to the frequency components. High sensitivity will report all the frequency components, low sensitivity will report only the most distant ones.", allowedValues= {"LOW","NORMAL","HIGH"}, defaultValue="LOW", title="Sensitivity to the frequency components. High sensitivity will report all the frequency components, low sensitivity will report only the most distant ones.", identifier = "Sensitivity", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSensitivity(String data) {inputs.put("Sensitivity",data);}
// SSA (Caterpillar) forecasting parameters.
@LiteralDataInput(abstrakt="Name of the parameter: SSA_Window_in_Samples. The number of samples in the produced uniformly sampled signal, to use in the SSA algorithm. Must be strictly less than the Time Series length. This number should identify a portion of the signal long enough to make the system guess the nature of the trend", defaultValue="20", title="The number of samples in the produced uniformly sampled signal, to use in the SSA algorithm. Must be strictly less than the Time Series length. This number should identify a portion of the signal long enough to make the system guess the nature of the trend", identifier = "SSA_Window_in_Samples", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setSSA_Window_in_Samples(Integer data) {inputs.put("SSA_Window_in_Samples",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: SSA_EigenvaluesThreshold. The threshold under which an SSA eigenvalue will be ignored, along with its eigenvector, for the reconstruction of the signal", defaultValue="0.7", title="The threshold under which an SSA eigenvalue will be ignored, along with its eigenvector, for the reconstruction of the signal", identifier = "SSA_EigenvaluesThreshold", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setSSA_EigenvaluesThreshold(Double data) {inputs.put("SSA_EigenvaluesThreshold",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: SSA_Points_to_Forecast. The number of points to forecast over the original length of the time series", defaultValue="10", title="The number of points to forecast over the original length of the time series", identifier = "SSA_Points_to_Forecast", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setSSA_Points_to_Forecast(Integer data) {inputs.put("SSA_Points_to_Forecast",""+data);}
// Catch-all XML output produced by the engine; populated by super.run().
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
// Entry point invoked by the WPS framework; all work is delegated to the superclass.
@Execute public void run() throws Exception { super.run(); } }

View File

@ -1,25 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS binding for the TIME_SERIES_CHARTS transducer.
 * Generated-style mapper class: all algorithm metadata lives in the
 * annotations; the setters only stash raw input values into the inherited
 * "inputs" map and execution is delegated to AbstractEcologicalEngineMapper.
 * NOTE(review): "inputs"/"outputs" are presumably maps declared in the
 * superclass — not visible in this file; confirm against the mapper source.
 */
@Algorithm(statusSupported=true, title="TIME_SERIES_CHARTS", abstrakt="An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS", version = "1.1.0")
public class TIME_SERIES_CHARTS extends AbstractEcologicalEngineMapper implements ITransducer{
// Required CSV table input; stored as-is (GenericFileData, not stringified).
@ComplexDataInput(abstrakt="Name of the parameter: InputTable. The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "InputTable", binding = GenericFileDataBinding.class) public void setInputTable(GenericFileData file) {inputs.put("InputTable",file);}
// Pipe-separated list of attribute (dimension) column names from InputTable.
@LiteralDataInput(abstrakt="Name of the parameter: Attributes. The dimensions to consider in the charts [a sequence of names of columns from InputTable separated by | ]", defaultValue="", title="The dimensions to consider in the charts [a sequence of names of columns from InputTable separated by | ]", identifier = "Attributes", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setAttributes(String data) {inputs.put("Attributes",data);}
// Pipe-separated list of numeric quantity column names from InputTable.
@LiteralDataInput(abstrakt="Name of the parameter: Quantities. The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]", defaultValue="", title="The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]", identifier = "Quantities", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setQuantities(String data) {inputs.put("Quantities",data);}
// Name of the time column; defaults to "year".
@LiteralDataInput(abstrakt="Name of the parameter: Time. The column containing time information [the name of a column from InputTable]", defaultValue="year", title="The column containing time information [the name of a column from InputTable]", identifier = "Time", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setTime(String data) {inputs.put("Time",data);}
// Open-ended XML output produced by the computation, exposed as-is (may be null).
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
// Execution simply delegates to the shared mapper logic in the superclass.
@Execute public void run() throws Exception { super.run(); } }

View File

@ -1,31 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS binding for the XYEXTRACTOR transducer (grid extraction of values
 * from a geospatial layer over a user-specified bounding box/resolution).
 * Generated-style mapper class: all algorithm metadata lives in the
 * annotations; setters stash values into the inherited "inputs" map and
 * execution is delegated to AbstractEcologicalEngineMapper.
 */
@Algorithm(statusSupported=true, title="XYEXTRACTOR", abstrakt="An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR", version = "1.1.0")
public class XYEXTRACTOR extends AbstractEcologicalEngineMapper implements ITransducer{
// Layer reference: title, UUID or direct HTTP link; format guessed downstream.
@LiteralDataInput(abstrakt="Name of the parameter: Layer. Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", defaultValue="", title="Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", identifier = "Layer", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayer(String data) {inputs.put("Layer",data);}
// Bounding-box corners (lat/long pairs); numeric values are stringified ("" + data).
@LiteralDataInput(abstrakt="Name of the parameter: BBox_LowerLeftLat. Lower Left Latitute of the Bounding Box", defaultValue="-60", title="Lower Left Latitute of the Bounding Box", identifier = "BBox_LowerLeftLat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_LowerLeftLat(Double data) {inputs.put("BBox_LowerLeftLat",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: BBox_LowerLeftLong. Lower Left Longitude of the Bounding Box", defaultValue="-50", title="Lower Left Longitude of the Bounding Box", identifier = "BBox_LowerLeftLong", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_LowerLeftLong(Double data) {inputs.put("BBox_LowerLeftLong",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: BBox_UpperRightLat. Upper Right Latitute of the Bounding Box", defaultValue="60", title="Upper Right Latitute of the Bounding Box", identifier = "BBox_UpperRightLat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_UpperRightLat(Double data) {inputs.put("BBox_UpperRightLat",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: BBox_UpperRightLong. Upper Right Longitude of the Bounding Box", defaultValue="50", title="Upper Right Longitude of the Bounding Box", identifier = "BBox_UpperRightLong", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_UpperRightLong(Double data) {inputs.put("BBox_UpperRightLong",""+data);}
// Label for the produced table.
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="extr_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
// Depth/altitude selector (default 0 = surface or first available Z).
@LiteralDataInput(abstrakt="Name of the parameter: Z. Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", defaultValue="0", title="Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setZ(Double data) {inputs.put("Z",""+data);}
// Time-slice index (default 0 = first time-indexed dataset).
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex. Time Index. The default is the first time indexed dataset", defaultValue="0", title="Time Index. The default is the first time indexed dataset", identifier = "TimeIndex", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex(Integer data) {inputs.put("TimeIndex",""+data);}
// Grid resolution on each axis.
@LiteralDataInput(abstrakt="Name of the parameter: XResolution. Projection resolution on the X axis", defaultValue="0.5", title="Projection resolution on the X axis", identifier = "XResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setXResolution(Double data) {inputs.put("XResolution",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: YResolution. Projection resolution on the Y axis", defaultValue="0.5", title="Projection resolution on the Y axis", identifier = "YResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setYResolution(Double data) {inputs.put("YResolution",""+data);}
// Fetches the CSV result from the URL stored under "OutputTable1" in the
// outputs map. On ANY failure it prints the stack trace and returns null —
// callers must tolerate null. NOTE(review): the opened stream is handed to
// GenericFileData; presumably closed downstream — verify.
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
// Open-ended XML output produced by the computation, exposed as-is (may be null).
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
// Execution simply delegates to the shared mapper logic in the superclass.
@Execute public void run() throws Exception { super.run(); } }

View File

@ -1,37 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS binding for the XYEXTRACTOR_TABLE transducer (grid extraction of
 * values from a geospatial TABLE rather than a layer file).
 * Generated-style mapper class: metadata lives in the annotations; setters
 * stash values into the inherited "inputs" map and execution is delegated
 * to AbstractEcologicalEngineMapper.
 */
@Algorithm(statusSupported=true, title="XYEXTRACTOR_TABLE", abstrakt="An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE", version = "1.1.0")
public class XYEXTRACTOR_TABLE extends AbstractEcologicalEngineMapper implements ITransducer{
// Required geospatial CSV table (must contain at least x,y columns).
@ComplexDataInput(abstrakt="Name of the parameter: geoReferencedTableName. A geospatial table containing at least x,y information [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="A geospatial table containing at least x,y information [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "geoReferencedTableName", binding = GenericFileDataBinding.class) public void setgeoReferencedTableName(GenericFileData file) {inputs.put("geoReferencedTableName",file);}
// Column-name selectors for x/y/value plus an optional row filter.
@LiteralDataInput(abstrakt="Name of the parameter: xColumn. The column containing x (longitude) information [the name of a column from geoReferencedTableName]", defaultValue="x", title="The column containing x (longitude) information [the name of a column from geoReferencedTableName]", identifier = "xColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setxColumn(String data) {inputs.put("xColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: yColumn. The column containing y (latitude) information [the name of a column from geoReferencedTableName]", defaultValue="y", title="The column containing y (latitude) information [the name of a column from geoReferencedTableName]", identifier = "yColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setyColumn(String data) {inputs.put("yColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: valueColumn. A column containing real valued features [the name of a column from geoReferencedTableName]", defaultValue="value", title="A column containing real valued features [the name of a column from geoReferencedTableName]", identifier = "valueColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setvalueColumn(String data) {inputs.put("valueColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: filter. A filter on one of the columns (e.g. speed=2)", defaultValue=" ", title="A filter on one of the columns (e.g. speed=2)", identifier = "filter", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setfilter(String data) {inputs.put("filter",data);}
// Optional z (depth/altitude) and time columns.
@LiteralDataInput(abstrakt="Name of the parameter: zColumn. The column containing z (altitude or depth) information (optional)", defaultValue="z", title="The column containing z (altitude or depth) information (optional)", identifier = "zColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setzColumn(String data) {inputs.put("zColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: timeColumn. The column containing time (otional)", defaultValue="datetime", title="The column containing time (otional)", identifier = "timeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void settimeColumn(String data) {inputs.put("timeColumn",data);}
// Bounding-box corners (lat/long pairs); numeric values are stringified ("" + data).
@LiteralDataInput(abstrakt="Name of the parameter: BBox_LowerLeftLat. Lower Left Latitute of the Bounding Box", defaultValue="-60", title="Lower Left Latitute of the Bounding Box", identifier = "BBox_LowerLeftLat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_LowerLeftLat(Double data) {inputs.put("BBox_LowerLeftLat",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: BBox_LowerLeftLong. Lower Left Longitude of the Bounding Box", defaultValue="-50", title="Lower Left Longitude of the Bounding Box", identifier = "BBox_LowerLeftLong", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_LowerLeftLong(Double data) {inputs.put("BBox_LowerLeftLong",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: BBox_UpperRightLat. Upper Right Latitute of the Bounding Box", defaultValue="60", title="Upper Right Latitute of the Bounding Box", identifier = "BBox_UpperRightLat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_UpperRightLat(Double data) {inputs.put("BBox_UpperRightLat",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: BBox_UpperRightLong. Upper Right Longitude of the Bounding Box", defaultValue="50", title="Upper Right Longitude of the Bounding Box", identifier = "BBox_UpperRightLong", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_UpperRightLong(Double data) {inputs.put("BBox_UpperRightLong",""+data);}
// Output-table label, Z selector, time index and grid resolutions.
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="extr_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
@LiteralDataInput(abstrakt="Name of the parameter: Z. Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", defaultValue="0", title="Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setZ(Double data) {inputs.put("Z",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex. Time Index. The default is the first time indexed dataset", defaultValue="0", title="Time Index. The default is the first time indexed dataset", identifier = "TimeIndex", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex(Integer data) {inputs.put("TimeIndex",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: XResolution. Projection resolution on the X axis", defaultValue="0.5", title="Projection resolution on the X axis", identifier = "XResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setXResolution(Double data) {inputs.put("XResolution",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: YResolution. Projection resolution on the Y axis", defaultValue="0.5", title="Projection resolution on the Y axis", identifier = "YResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setYResolution(Double data) {inputs.put("YResolution",""+data);}
// Fetches the CSV result from the URL stored under "OutputTable1"; on ANY
// failure it prints the stack trace and returns null — callers must tolerate
// null. NOTE(review): the opened stream is handed to GenericFileData;
// presumably closed downstream — verify.
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
// Open-ended XML output produced by the computation, exposed as-is (may be null).
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
// Execution simply delegates to the shared mapper logic in the superclass.
@Execute public void run() throws Exception { super.run(); } }

View File

@ -1,27 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS binding for the ZEXTRACTION transducer (extracts the Z-axis values
 * of a geospatial layer at a given x,y point and time index).
 * Generated-style mapper class: metadata lives in the annotations; setters
 * stash values into the inherited "inputs" map and execution is delegated
 * to AbstractEcologicalEngineMapper.
 */
@Algorithm(statusSupported=true, title="ZEXTRACTION", abstrakt="An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION", version = "1.1.0")
public class ZEXTRACTION extends AbstractEcologicalEngineMapper implements ITransducer{
// Layer reference: title, UUID or direct HTTP link; format guessed downstream.
@LiteralDataInput(abstrakt="Name of the parameter: Layer. Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", defaultValue="", title="Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", identifier = "Layer", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayer(String data) {inputs.put("Layer",data);}
// Output-table label, sampling point (X,Y), time index and Z step; numeric
// values are stringified ("" + data) before being stored.
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="extr_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
@LiteralDataInput(abstrakt="Name of the parameter: X. X coordinate", defaultValue="0", title="X coordinate", identifier = "X", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setX(Double data) {inputs.put("X",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: Y. Y coordinate", defaultValue="0", title="Y coordinate", identifier = "Y", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setY(Double data) {inputs.put("Y",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex. Time Index. The default is the first time indexed dataset", defaultValue="0", title="Time Index. The default is the first time indexed dataset", identifier = "TimeIndex", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex(Integer data) {inputs.put("TimeIndex",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: Resolution. Step for Z values", defaultValue="100", title="Step for Z values", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setResolution(Double data) {inputs.put("Resolution",""+data);}
// Fetches the CSV result from the URL stored under "OutputTable1"; on ANY
// failure it prints the stack trace and returns null — callers must tolerate
// null. NOTE(review): the opened stream is handed to GenericFileData;
// presumably closed downstream — verify.
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
// Open-ended XML output produced by the computation, exposed as-is (may be null).
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
// Execution simply delegates to the shared mapper logic in the superclass.
@Execute public void run() throws Exception { super.run(); } }

View File

@ -1,33 +0,0 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
import java.io.File;
import java.net.URL;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.io.StringWriter;
import org.apache.commons.io.IOUtils;
import org.apache.xmlbeans.XmlObject;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
import org.n52.wps.algorithm.annotation.*;
import org.n52.wps.io.data.*;
import org.n52.wps.io.data.binding.complex.*;
import org.n52.wps.io.data.binding.literal.*;
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
/**
 * WPS binding for the ZEXTRACTION_TABLE transducer (Z-value extraction
 * from a geospatial TABLE rather than a layer file).
 * Generated-style mapper class: metadata lives in the annotations; setters
 * stash values into the inherited "inputs" map and execution is delegated
 * to AbstractEcologicalEngineMapper.
 */
@Algorithm(statusSupported=true, title="ZEXTRACTION_TABLE", abstrakt="An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE", version = "1.1.0")
public class ZEXTRACTION_TABLE extends AbstractEcologicalEngineMapper implements ITransducer{
// Required geospatial CSV table (must contain at least x,y columns).
@ComplexDataInput(abstrakt="Name of the parameter: geoReferencedTableName. A geospatial table containing at least x,y information [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="A geospatial table containing at least x,y information [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "geoReferencedTableName", binding = GenericFileDataBinding.class) public void setgeoReferencedTableName(GenericFileData file) {inputs.put("geoReferencedTableName",file);}
// Column-name selectors for x/y/z/value plus an optional row filter and time column.
@LiteralDataInput(abstrakt="Name of the parameter: xColumn. The column containing x (longitude) information [the name of a column from geoReferencedTableName]", defaultValue="x", title="The column containing x (longitude) information [the name of a column from geoReferencedTableName]", identifier = "xColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setxColumn(String data) {inputs.put("xColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: yColumn. The column containing y (latitude) information [the name of a column from geoReferencedTableName]", defaultValue="y", title="The column containing y (latitude) information [the name of a column from geoReferencedTableName]", identifier = "yColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setyColumn(String data) {inputs.put("yColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: zColumn. The column containing z information [the name of a column from geoReferencedTableName]", defaultValue="z", title="The column containing z information [the name of a column from geoReferencedTableName]", identifier = "zColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setzColumn(String data) {inputs.put("zColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: valueColumn. A column containing real valued features [the name of a column from geoReferencedTableName]", defaultValue="value", title="A column containing real valued features [the name of a column from geoReferencedTableName]", identifier = "valueColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setvalueColumn(String data) {inputs.put("valueColumn",data);}
@LiteralDataInput(abstrakt="Name of the parameter: filter. A filter on one of the columns (e.g. speed=2)", defaultValue=" ", title="A filter on one of the columns (e.g. speed=2)", identifier = "filter", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setfilter(String data) {inputs.put("filter",data);}
@LiteralDataInput(abstrakt="Name of the parameter: timeColumn. The column containing time information (optional).", defaultValue="time", title="The column containing time information (optional).", identifier = "timeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void settimeColumn(String data) {inputs.put("timeColumn",data);}
// Output-table label, sampling point (X,Y), time index and Z step; numeric
// values are stringified ("" + data) before being stored.
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="extr_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
@LiteralDataInput(abstrakt="Name of the parameter: X. X coordinate", defaultValue="0", title="X coordinate", identifier = "X", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setX(Double data) {inputs.put("X",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: Y. Y coordinate", defaultValue="0", title="Y coordinate", identifier = "Y", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setY(Double data) {inputs.put("Y",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex. Time Index. The default is the first time indexed dataset", defaultValue="0", title="Time Index. The default is the first time indexed dataset", identifier = "TimeIndex", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex(Integer data) {inputs.put("TimeIndex",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: Resolution. Step for Z values", defaultValue="100", title="Step for Z values", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setResolution(Double data) {inputs.put("Resolution",""+data);}
// Fetches the CSV result from the URL stored under "OutputTable1"; on ANY
// failure it prints the stack trace and returns null — callers must tolerate
// null. NOTE(review): the opened stream is handed to GenericFileData;
// presumably closed downstream — verify.
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
// Open-ended XML output produced by the computation, exposed as-is (may be null).
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
// Execution simply delegates to the shared mapper logic in the superclass.
@Execute public void run() throws Exception { super.run(); } }

View File

@ -279,13 +279,16 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm {
}
}
}
public void updateComputationOnWS(float status, String exception) {
updateComputationOnWS(status, exception, null, null);
}
public void updateComputationOnWS(float status, String exception, List<StoredData> inputData, List<File> generatedData) {
if (currentComputation != null) {
currentComputation.setStatus(""+status);
if (exception!=null && exception.length()>0)
currentComputation.setException(exception);
DataspaceManager manager = new DataspaceManager(config, currentComputation, null, null, null);
DataspaceManager manager = new DataspaceManager(config, currentComputation, inputData, null, generatedData);
try {
manager.writeRunningComputationData();
} catch (Exception ez) {
@ -294,7 +297,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm {
}
}
}
@Execute
public void run() throws Exception {
String algorithm = "";
@ -308,7 +311,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm {
computationSession = this.getAlgorithmClass().getSimpleName() + "_ID_" + wpsExternalID;
} else
AnalysisLogger.getLogger().info("Wps External ID not set");
InputsManager inputsManager = null;
try {
// wait for server resources to be available
@ -349,7 +352,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm {
} else
AnalysisLogger.getLogger().info("Using cached database information: " + supportDatabaseInfo);
AnalysisLogger.getLogger().info("Retrieved Central Database: " + supportDatabaseInfo);
InputsManager inputsManager = new InputsManager(inputs, config, computationSession);
inputsManager = new InputsManager(inputs, config, computationSession);
inputsManager.configSupportDatabaseParameters(supportDatabaseInfo);
time("Central database information retrieval");
// retrieve the algorithm to execute
@ -430,7 +433,11 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm {
AnalysisLogger.getLogger().debug("Error in Algorithm execution: " + algorithm);
AnalysisLogger.getLogger().debug(e);
e.printStackTrace();
updateComputationOnWS(-2,e.getMessage());
if (inputsManager!=null)
updateComputationOnWS(-2,e.getMessage(),inputsManager.getProvenanceData(),generatedFiles);
else
updateComputationOnWS(-2,e.getMessage());
throw e;
} finally {
AnalysisLogger.getLogger().debug("Deleting Input Tables");

View File

@ -121,7 +121,7 @@ public class DataspaceManager implements Runnable {
try {
if (data.type == StoredType.DATA) {
if (new File(data.payload).exists()) {
if (new File(data.payload).exists() || !data.payload.startsWith("http")) {
AnalysisLogger.getLogger().debug("Dataspace->Uploading file " + data.payload);
in = new FileInputStream(new File(data.payload));
} else {

View File

@ -1,15 +1,9 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.test;
import java.io.File;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS;
import org.junit.Test;
import org.n52.wps.io.data.GenericFileData;
public class TestMappedClusterers {
/*
@Test
public void testDBSCAN() throws Exception{
DBSCAN algorithm = new DBSCAN();
@ -43,6 +37,6 @@ public class TestMappedClusterers {
algorithm.run();
}
*/
}

View File

@ -1,15 +1,9 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.test;
import java.io.File;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON;
import org.junit.Test;
import org.n52.wps.io.data.GenericFileData;
public class TestMappedEvaluators {
/*
@Test
public void testMAPS_COMPARISON() throws Exception{
MAPS_COMPARISON algorithm = new MAPS_COMPARISON();
@ -62,5 +56,5 @@ public class TestMappedEvaluators {
algorithm.run();
}
*/
}

View File

@ -1,10 +1,5 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.test;
import java.io.File;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM;
import org.junit.Test;
import org.n52.wps.io.data.GenericFileData;
public class TestMappedGenerators {
@ -27,7 +22,7 @@ public class TestMappedGenerators {
algorithm.run();
}
*/
/*
@Test
public void testBIONYM() throws Exception{
BIONYM algorithm = new BIONYM();
@ -47,4 +42,5 @@ public class TestMappedGenerators {
algorithm.setMaxResults_1(3);
algorithm.run();
}
*/
}

View File

@ -1,29 +1,9 @@
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.test;
import java.io.File;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS;
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR;
import org.junit.Test;
import org.n52.wps.io.data.GenericFileData;
public class TestMappedTransducerers {
/*
@Test
public void testBionymLocal() throws Exception{
BIONYM_LOCAL algorithm = new BIONYM_LOCAL();
@ -41,6 +21,7 @@ public class TestMappedTransducerers {
algorithm.setMaxResults_1(3);
algorithm.run();
}
*/
/*
@Test
@ -99,6 +80,8 @@ public class TestMappedTransducerers {
algorithm.run();
}
*/
/*
@Test
public void testHCAF_FILTER() throws Exception{
HCAF_FILTER algorithm = new HCAF_FILTER();
@ -227,7 +210,7 @@ public class TestMappedTransducerers {
algorithm.run();
}
*/
/*
@Test
public void testPOINTS_TO_MAP() throws Exception{
@ -244,7 +227,7 @@ public class TestMappedTransducerers {
algorithm.run();
}
*/
/*
@Test
public void testOCCURRENCE_ENRICHMENT() throws Exception{
OCCURRENCE_ENRICHMENT algorithm = new OCCURRENCE_ENRICHMENT();
@ -370,4 +353,6 @@ public class TestMappedTransducerers {
algorithm.run();
}
*/
}