diff --git a/.classpath b/.classpath
new file mode 100644
index 0000000..b008e62
--- /dev/null
+++ b/.classpath
@@ -0,0 +1,31 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/.project b/.project
new file mode 100644
index 0000000..1f15cd4
--- /dev/null
+++ b/.project
@@ -0,0 +1,23 @@
+
+
+ DataminerAlgorithmsInstaller1.2
+
+
+
+
+
+ org.eclipse.jdt.core.javabuilder
+
+
+
+
+ org.eclipse.m2e.core.maven2Builder
+
+
+
+
+
+ org.eclipse.m2e.core.maven2Nature
+ org.eclipse.jdt.core.javanature
+
+
diff --git a/.settings/org.eclipse.core.resources.prefs b/.settings/org.eclipse.core.resources.prefs
new file mode 100644
index 0000000..365bbd6
--- /dev/null
+++ b/.settings/org.eclipse.core.resources.prefs
@@ -0,0 +1,5 @@
+eclipse.preferences.version=1
+encoding//src/main/java=UTF-8
+encoding//src/main/resources=UTF-8
+encoding//src/test/java=UTF-8
+encoding/=UTF-8
diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..fd9afef
--- /dev/null
+++ b/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,2 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
diff --git a/.settings/org.eclipse.ltk.core.refactoring.prefs b/.settings/org.eclipse.ltk.core.refactoring.prefs
new file mode 100644
index 0000000..a3f7fe4
--- /dev/null
+++ b/.settings/org.eclipse.ltk.core.refactoring.prefs
@@ -0,0 +1,3 @@
+#Wed Apr 27 12:48:22 CEST 2016
+eclipse.preferences.version=1
+org.eclipse.ltk.core.refactoring.enable.project.refactoring.history=false
diff --git a/.settings/org.eclipse.m2e.core.prefs b/.settings/org.eclipse.m2e.core.prefs
new file mode 100644
index 0000000..634e9e7
--- /dev/null
+++ b/.settings/org.eclipse.m2e.core.prefs
@@ -0,0 +1,5 @@
+#Tue Apr 26 20:08:03 CEST 2016
+activeProfiles=
+eclipse.preferences.version=1
+resolveWorkspaceProjects=true
+version=1
diff --git a/distro/LICENSE b/distro/LICENSE
new file mode 100644
index 0000000..3695e26
--- /dev/null
+++ b/distro/LICENSE
@@ -0,0 +1 @@
+${gcube.license}
diff --git a/distro/README b/distro/README
new file mode 100644
index 0000000..24144e6
--- /dev/null
+++ b/distro/README
@@ -0,0 +1,69 @@
+The gCube System - ${name}
+--------------------------------------------------
+
+${description}
+
+
+${gcube.description}
+
+${gcube.funding}
+
+
+Version
+--------------------------------------------------
+
+${version} (${buildDate})
+
+Please see the file named "changelog.xml" in this directory for the release notes.
+
+
+Authors
+--------------------------------------------------
+
+* Gianpaolo Coro (gianpaolo.coro-AT-isti.cnr.it),
+ Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" CNR, Pisa IT
+
+
+Maintainers
+-----------
+
+* Gianpaolo Coro (gianpaolo.coro-AT-isti.cnr.it),
+ Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" CNR, Pisa IT
+
+
+Download information
+--------------------------------------------------
+
+Source code is available from SVN:
+ ${scm.url}
+
+Binaries can be downloaded from the gCube website:
+ ${gcube.website}
+
+
+Installation
+--------------------------------------------------
+
+Installation documentation is available on-line in the gCube Wiki:
+ https://wiki.gcube-system.org/gcube
+
+
+Documentation
+--------------------------------------------------
+
+Documentation is available on-line in the gCube Wiki:
+ https://wiki.gcube-system.org/gcube/Ecological_Modeling
+
+
+Support
+--------------------------------------------------
+
+Bugs and support requests can be reported in the gCube issue tracking tool:
+ ${gcube.issueTracking}
+
+
+Licensing
+--------------------------------------------------
+
+This software is licensed under the terms you may find in the file named "LICENSE" in this directory.
+
diff --git a/distro/addAlgorithm.sh b/distro/addAlgorithm.sh
new file mode 100644
index 0000000..802fbb2
--- /dev/null
+++ b/distro/addAlgorithm.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+INFRA_ENV=$( egrep ^INFRA_REFERENCE= /usr/local/bin/algorithms-updater | cut -d = -f 2 )/software
+if [ -n "$9" ] ; then # quoted -n test: safe when $9 is empty or contains spaces (was: [ ! -z $9 ], which breaks on whitespace)
+ INFRA_ENV="$9"
+fi
+echo "$# arguments to $0: $*"
+java -cp ../tomcat/webapps/wps/WEB-INF/lib/*:../tomcat/lib/*:./*:../wps_algorithms/algorithms/$INFRA_ENV org.gcube.dataanalysis.wps.mapper.DataMinerUpdater -a$1 -l../wps_algorithms/algorithms/$INFRA_ENV -t$2 -i$3 -c../tomcat/webapps/wps/ecocfg/ -s$4 -e$5 -k$6 -u$7 -d$8
diff --git a/distro/algorithmTemplate b/distro/algorithmTemplate
new file mode 100644
index 0000000..9075a37
--- /dev/null
+++ b/distro/algorithmTemplate
@@ -0,0 +1,2 @@
+TRANSDUCERS
+EmptyEmptystring
diff --git a/distro/assembly.xml b/distro/assembly.xml
new file mode 100644
index 0000000..f7fd8fd
--- /dev/null
+++ b/distro/assembly.xml
@@ -0,0 +1,30 @@
+
+ algorithm-installer-bundle
+
+ tar.gz
+
+ algorithmInstaller
+
+
+ .
+ ${distroDirectory}
+ true
+
+ addAlgorithm.sh
+ algorithmTemplate
+
+ 755
+ true
+
+
+ target
+ .
+
+ dataminer-algorithms-importer*.jar
+
+
+
+
\ No newline at end of file
diff --git a/distro/changelog.xml b/distro/changelog.xml
new file mode 100644
index 0000000..1a1de46
--- /dev/null
+++ b/distro/changelog.xml
@@ -0,0 +1,5 @@
+
+
+ First Release
+
+
\ No newline at end of file
diff --git a/distro/descriptor.xml b/distro/descriptor.xml
new file mode 100644
index 0000000..0eec2ec
--- /dev/null
+++ b/distro/descriptor.xml
@@ -0,0 +1,32 @@
+
+ servicearchive
+
+ tar.gz
+
+ /
+
+
+ ${distroDirectory}
+ /
+ true
+
+ README
+ LICENSE
+ changelog.xml
+ profile.xml
+
+ 755
+ true
+
+
+
+
+
+ /${artifactId}
+
+
+
+
diff --git a/distro/profile.xml b/distro/profile.xml
new file mode 100644
index 0000000..08b2fab
--- /dev/null
+++ b/distro/profile.xml
@@ -0,0 +1,30 @@
+
+
+
+ Service
+
+ ${project.description}
+ DataAnalysis
+ ${project.name}
+ 1.0.0
+
+
+ ${project.name}
+ ${project.description}
+ ${version}
+
+ ${project.groupId}
+ ${project.artifactId}
+ ${project.version}
+
+ Service
+
+ ${project.build.finalName}.${project.packaging}
+
+
+
+
+
+
+
+
diff --git a/package/algorithmInstaller1_1.zip b/package/algorithmInstaller1_1.zip
new file mode 100644
index 0000000..1913927
Binary files /dev/null and b/package/algorithmInstaller1_1.zip differ
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..b57c6e2
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,119 @@
+
+ 4.0.0
+
+ maven-parent
+ org.gcube.tools
+ 1.0.0
+
+
+ org.gcube.dataanalysis
+ dataminer-algorithms-importer
+ 1.2.0-SNAPSHOT
+ dataminer-algorithms-importer
+ Algorithms for the dataminer service
+
+ https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/DataminerAlgorithms
+
+
+
+ Gianpaolo Coro
+ gianpaolo.coro@isti.cnr.it
+ CNR Pisa, Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"
+
+ architect
+ developer
+
+
+
+
+ ${project.build.directory}/${project.build.finalName}
+ distro
+ UTF-8
+ UTF-8
+
+
+
+ org.gcube.dataanalysis
+ dataminer
+ [1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)
+ provided
+
+
+ gis-interface
+ org.gcube.spatial.data
+
+
+
+
+ org.gcube.resources
+ registry-publisher
+ [1.1.0-SNAPSHOT,2.0.0-SNAPSHOT)
+
+
+ org.gcube.resources
+ common-gcore-resources
+ [1.1.0-SNAPSHOT,2.0.0-SNAPSHOT)
+
+
+ junit
+ junit
+ 4.12
+
+
+
+
+
+
+
+ maven-compiler-plugin
+ 3.3
+
+
+ 1.8
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+ 2.18.1
+
+ true
+
+
+
+
+ org.apache.maven.plugins
+ maven-assembly-plugin
+
+
+ ${distroDirectory}/assembly.xml
+
+ dataminer-algorithms-importer-${project.version}
+ false
+
+
+
+ package
+
+ single
+
+
+
+
+
+
+
+
+ n52-releases
+ 52n Releases
+ http://52north.org/maven/repo/releases
+
+ true
+
+
+ false
+
+
+
+
\ No newline at end of file
diff --git a/src/main/java/org/gcube/dataanalysis/wps/mapper/AlgorithmsChecker.java b/src/main/java/org/gcube/dataanalysis/wps/mapper/AlgorithmsChecker.java
new file mode 100644
index 0000000..3d418b5
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/wps/mapper/AlgorithmsChecker.java
@@ -0,0 +1,274 @@
+package org.gcube.dataanalysis.wps.mapper;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.Reader;
+import java.net.URL;
+import java.net.URLConnection;
+import java.net.URLEncoder;
+
+import org.apache.http.HttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.params.HttpConnectionParams;
+import org.apache.http.params.HttpParams;
+import org.apache.http.util.EntityUtils;
+import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
+
+public class AlgorithmsChecker {
+
+ // Performs an HTTP GET on the given URL and returns the full response body as a String.
+ // Connect/read timeouts are 7 minutes each; the client is shut down after a successful read.
+ // NOTE(review): DefaultHttpClient is the deprecated pre-4.3 HttpClient API — consider
+ // CloseableHttpClient when this code is next touched.
+ public static String readPage(URL url) throws Exception {
+
+ DefaultHttpClient httpClient = new DefaultHttpClient();
+ HttpParams params = httpClient.getParams();
+ HttpConnectionParams.setConnectionTimeout(params, 7 * 60000); // 7 minutes
+ HttpConnectionParams.setSoTimeout(params, 7 * 60000);
+ HttpConnectionParams.setStaleCheckingEnabled(params, false);
+ HttpConnectionParams.setSoKeepalive(params, false);
+ HttpGet request = null;
+ try {
+ request = new HttpGet(url.toURI());
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw e; // rethrown after logging: an unparsable URI is fatal for the caller
+ }
+ HttpResponse response = httpClient.execute(request);
+ System.out.println("URL executed!");
+ Reader reader = null;
+ try {
+ // NOTE(review): charset defaults to the platform encoding here — the response
+ // charset is not consulted; confirm against the server's Content-Type.
+ reader = new InputStreamReader(response.getEntity().getContent());
+ System.out.println("Read input stream!");
+ StringBuffer sb = new StringBuffer();
+ {
+ int read;
+ char[] cbuf = new char[1024];
+ while ((read = reader.read(cbuf)) != -1)
+ sb.append(cbuf, 0, read);
+ }
+
+ EntityUtils.consume(response.getEntity());
+ httpClient.getConnectionManager().shutdown();
+
+ return sb.toString();
+
+ } finally {
+
+ // reader is always closed; close() failures are logged but not propagated
+ if (reader != null) {
+ try {
+ reader.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
+ // Performs a plain HTTP GET via URLConnection and returns the response body.
+ // Bug fixed: the original opened the connection's InputStream but never read
+ // from it, never closed it, and always returned the empty string.
+ public static String httpgetpage(String url) throws Exception {
+ URL urlObj = new URL(url);
+ InputStream is = urlObj.openConnection().getInputStream();
+ System.out.println("input stream OK");
+ StringBuffer sb = new StringBuffer();
+ // NOTE(review): platform default charset, consistent with readPage above.
+ Reader r = new InputStreamReader(is);
+ try {
+ int read;
+ char[] cbuf = new char[1024];
+ while ((read = r.read(cbuf)) != -1)
+ sb.append(cbuf, 0, read);
+ } finally {
+ r.close(); // also closes the underlying InputStream
+ }
+ return sb.toString();
+ }
+
+ public static void main2(String[] args) throws Exception {
+ // String url =
+ // "http://dataminer1-d-d4s.d4science.org:80/wps/WebProcessingService?Service=WPS&gcube-token=f0666597-4302-49ce-bea2-555b94e569cb&Request=DescribeProcess&identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.KNITR_COMPILER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESTIMATE_FISHING_ACTIVITY,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GRID_CWP_TO_COORDINATES,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MOST_OBSERVED_TAXA,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCES_INTERSECTOR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCES_SUBTRACTION,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.LWR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SAMPLEONTABLE,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCES_MERGER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARES_TO_COORDINATES,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATION_MEOW_AREA_PER_YEAR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCES_MARINE_TERRESTRIAL,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_MAP_FROM_POINTS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS,org.gcube.dataanalysis.wps.statisticalmana
ger.synchserver.mappedclasses.generators.BIONYM,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MOST_OBSERVED_SPECIES,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATIONS_TREND_PER_YEAR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TAXONOMY_OBSERVATIONS_TREND_PER_YEAR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HCAF,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATION_LME_AREA_PER_YEAR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_INTERPOLATION,org.gcube.dataa
nalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.modellers.AQUAMAPSNN,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCES_DUPLICATES_DELETER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HSPEC,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTTABLES,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SEADATANET_INTERPOLATOR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTDBINFO,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_MAP_FROM_CSQUARES,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ECOPATH_WITH_ECOSIM,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESTIMATE_MONTHLY_FISHING_EFFORT,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.POLYGONS_TO_MAP,org.gcube.dataanalysis.wps.statisticalmanag
er.synchserver.mappedclasses.transducerers.SUBMITQUERY,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTDBSCHEMA,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GETTABLEDETAILS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATIONS_PER_AREA,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SMARTSAMPLEONTABLE,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RANDOMSAMPLEONTABLE,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HSPEN,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTDBNAMES,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.POINTS_TO_MAP,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_GENERATION_FROM_OBIS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.WEB_APP_PUBLISHER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CCAMLRTEST&version=1.0.0&";
+ String url = "http://dataminer1-d-d4s.d4science.org:80/wps/WebProcessingService?Service=WPS&gcube-token=f0666597-4302-49ce-bea2-555b94e569cb&Request=DescribeProcess&identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.KNITR_COMPILER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESTIMATE_FISHING_ACTIVITY&version=1.0.0&";
+ String page = httpgetpage(url);
+ // String page = readPage(new URL(url));
+ System.out.println(page);
+ System.out.println("*******************");
+ }
+
+ public static void main1(String[] args) throws Exception {
+
+ String page = readPage(new URL(
+ "http://dataminer1-d-d4s.d4science.org:80/wps/WebProcessingService?Service=WPS&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Request=DescribeProcess&identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.KNITR_COMPILER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESTIMATE_FISHING_ACTIVITY,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GRID_CWP_TO_COORDINATES,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MOST_OBSERVED_TAXA,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCES_INTERSECTOR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCES_SUBTRACTION,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.LWR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SAMPLEONTABLE,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCES_MERGER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARES_TO_COORDINATES,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATION_MEOW_AREA_PER_YEAR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCES_MARINE_TERRESTRIAL,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_MAP_FROM_POINTS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS,org.gcube.dataanalysis.wps.statisticalmanager
.synchserver.mappedclasses.generators.BIONYM,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MOST_OBSERVED_SPECIES,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATIONS_TREND_PER_YEAR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TAXONOMY_OBSERVATIONS_TREND_PER_YEAR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HCAF,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATION_LME_AREA_PER_YEAR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_INTERPOLATION,org.gcube.dataanal
ysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.modellers.AQUAMAPSNN,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCES_DUPLICATES_DELETER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HSPEC,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTTABLES,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SEADATANET_INTERPOLATOR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTDBINFO,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_MAP_FROM_CSQUARES,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ECOPATH_WITH_ECOSIM,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESTIMATE_MONTHLY_FISHING_EFFORT,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.POLYGONS_TO_MAP,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY,org.gcube.dataanalysis.wps.statisticalman
ager.synchserver.mappedclasses.transducerers.LISTDBSCHEMA,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GETTABLEDETAILS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SPECIES_OBSERVATIONS_PER_AREA,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SMARTSAMPLEONTABLE,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RANDOMSAMPLEONTABLE,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HSPEN,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.LISTDBNAMES,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.POINTS_TO_MAP,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_GENERATION_FROM_OBIS,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.WEB_APP_PUBLISHER,org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CCAMLRTEST&version=1.0.0&"));
+ System.out.println(page);
+ System.out.println("*******************");
+ }
+
+ // Inner helper that downloads the content behind a URL into a local file.
+ public class WebPageSaver {
+
+ // Downloads 'page' (a URL string) and writes the raw bytes to 'file'.
+ // Fixed: both streams are now closed even when opening the connection or
+ // copying fails midway (the original leaked them on any exception).
+ public void save(String page, String file) throws Exception {
+ OutputStream out = new FileOutputStream(file);
+ try {
+ URL url = new URL(page);
+ URLConnection conn = url.openConnection();
+ conn.connect();
+ InputStream is = conn.getInputStream();
+ try {
+ copy(is, out);
+ } finally {
+ is.close();
+ }
+ } finally {
+ out.close();
+ }
+ }
+
+ // Copies all bytes from 'from' to 'to' using a 4 KB buffer.
+ private void copy(InputStream from, OutputStream to) throws IOException {
+ byte[] buffer = new byte[4096];
+ while (true) {
+ int numBytes = from.read(buffer);
+ if (numBytes == -1) {
+ break;
+ }
+ to.write(buffer, 0, numBytes);
+ }
+ }
+ }
+
+ // Smoke-tests a DataMiner WPS deployment: downloads GetCapabilities, then runs
+ // DescribeProcess for every ows:Identifier found, stopping at the first empty
+ // or ExceptionText-bearing response. On full success it prints a MediaWiki
+ // table (algorithm name + description) built from the responses.
+ public static void checkService(String hostname, String token) throws Exception {
+
+ FileTools reader = new FileTools();
+ System.out.println("Downloading capabilities");
+ String getCapaURL = "http://" + hostname + "/wps/WebProcessingService?Request=GetCapabilities&Service=WPS&gcube-token=" + token;
+ WebPageSaver saver = new AlgorithmsChecker().new WebPageSaver();
+ String file = "WebProcessingService.xml";
+ saver.save(getCapaURL, file);
+ String xmlString = reader.loadString(file, "UTF-8");
+ String wpsURL = "http://" + hostname + "/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=" + URLEncoder.encode(token,"UTF-8") + "&Identifier=";
+ // Line-by-line scan of the capabilities XML rather than a real XML parse.
+ String[] lines = xmlString.split("\n");
+ // String tocall = wpsURL;
+ StringBuffer sb = new StringBuffer();
+
+ int j = 0;
+ boolean fail = false;
+ for (int i = 0; i < lines.length; i++) {
+ if (lines[i].contains("ows:Identifier")) {
+ // Extract the text between '>' and '<' on the identifier line.
+ String id = lines[i];
+ id = id.substring(id.indexOf(">") + 1);
+ id = id.substring(0, id.indexOf("<"));
+ System.out.println("ID:" + id);
+ /*
+ * if (j>0) tocall= tocall+","+id; else tocall= tocall+id;
+ */
+ String tocall = wpsURL + URLEncoder.encode(id, "UTF-8");
+ String page = readPage(new URL(tocall));
+
+ System.out.println(page);
+ // NOTE(review): these marker strings appear to have been stripped by the
+ // patch-extraction pass that removed angle-bracketed text everywhere in
+ // this file (likely XML tags such as an ows:Abstract open/close pair).
+ // As committed ("" / "") the substring below degenerates to the empty
+ // string — restore the markers from the original source.
+ String startAbs = "";
+ String stopAbs = "";
+
+ System.out.println("*******************");
+ if (page.trim().length() == 0 || page.contains("ExceptionText")) {
+ System.out.println("*******************STOP!!!");
+ fail = true;
+ break;
+ }
+
+ // sb.append(id.substring(id.lastIndexOf(".")+1)+"\n");
+ // Simple class name of the algorithm (text after the last dot).
+ String algo = id.substring(id.lastIndexOf(".") + 1);
+ // NOTE(review): literal below is hard-wrapped mid-string in this patch
+ // artifact — verify it against the original file before applying.
+ String IDWiki = "\n! colspan=2 bgcolor=lightgrey | " + algo + "
\n|-\n|| Description\n||";
+
+ String Description = page.substring(page.indexOf(startAbs) + startAbs.length(), page.indexOf(stopAbs));
+ sb.append(IDWiki);
+ sb.append(Description + "\n");
+ sb.append("|-\n");
+
+ j++;
+ }
+
+ }
+ /*
+ * System.out.println("To call: "+tocall); String page = readPage(new
+ * URL(tocall)); System.out.println(page);
+ */
+ System.out.println("*******************");
+ if (!fail)
+ System.out.println(sb);
+ }
+
+ // Entry point: checks the dev-next DataMiner deployment.
+ // NOTE(review): a live gcube-token is hardcoded (and committed) here and in the
+ // commented line below — these credentials should be revoked and externalized.
+ public static void main(String[] args) throws Exception {
+ // checkService("dataminer1-d-d4s.d4science.org",
+ // "4ccc2c35-60c9-4c9b-9800-616538d5d48b");
+ checkService("dataminer1-devnext.d4science.org", "f9d49d76-cd60-48ed-9f8e-036bcc1fc045");
+ }
+
+ // Older variant of checkService: reads a previously saved capabilities file
+ // ("WebProcessingServiceNext.xml") instead of downloading it, then runs
+ // DescribeProcess per identifier against a hardcoded endpoint and builds the
+ // same MediaWiki table. Stops on an empty page or a JAVA_StackTrace marker.
+ // NOTE(review): shares the stripped startAbs/stopAbs markers and the
+ // hardcoded-token problem flagged on checkService/main above.
+ public static void main3(String[] args) throws Exception {
+
+ FileTools reader = new FileTools();
+ // String xmlString = reader.loadString("WebProcessingServicePre.xml",
+ // "UTF-8");
+ // String xmlString = reader.loadString("WebProcessingServiceDev.xml",
+ // "UTF-8");
+ String xmlString = reader.loadString("WebProcessingServiceNext.xml", "UTF-8");
+ // String wpsURL =
+ // "http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=";
+ // String wpsURL =
+ // "http://dataminer1-pre.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=0afbf5c8-b1ee-4e44-a6e7-0c235c8dc959&Identifier=";
+ String wpsURL = "http://dataminer1-devnext.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=f9d49d76-cd60-48ed-9f8e-036bcc1fc045&Identifier=";
+ String[] lines = xmlString.split("\n");
+ // String tocall = wpsURL;
+ StringBuffer sb = new StringBuffer();
+
+ int j = 0;
+ for (int i = 0; i < lines.length; i++) {
+ if (lines[i].contains("ows:Identifier")) {
+ // Extract the text between '>' and '<' on the identifier line.
+ String id = lines[i];
+ id = id.substring(id.indexOf(">") + 1);
+ id = id.substring(0, id.indexOf("<"));
+ System.out.println("ID:" + id);
+ /*
+ * if (j>0) tocall= tocall+","+id; else tocall= tocall+id;
+ */
+ String tocall = wpsURL + id;
+ String page = readPage(new URL(tocall));
+
+ System.out.println(page);
+ // NOTE(review): markers stripped by the patch extraction — see checkService.
+ String startAbs = "";
+ String stopAbs = "";
+
+ System.out.println("*******************");
+ if (page.trim().length() == 0 || page.contains("JAVA_StackTrace")) {
+ System.out.println("*******************STOP!!!");
+ break;
+ }
+
+ // sb.append(id.substring(id.lastIndexOf(".")+1)+"\n");
+ String algo = id.substring(id.lastIndexOf(".") + 1);
+ // NOTE(review): literal hard-wrapped mid-string in this patch artifact.
+ String IDWiki = "\n! colspan=2 bgcolor=lightgrey | " + algo + "
\n|-\n|| Description\n||";
+
+ String Description = page.substring(page.indexOf(startAbs) + startAbs.length(), page.indexOf(stopAbs));
+ sb.append(IDWiki);
+ sb.append(Description + "\n");
+ sb.append("|-\n");
+
+ j++;
+ }
+
+ }
+ /*
+ * System.out.println("To call: "+tocall); String page = readPage(new
+ * URL(tocall)); System.out.println(page);
+ */
+ System.out.println("*******************");
+
+ System.out.println(sb);
+ }
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/wps/mapper/ClassGenerator.java b/src/main/java/org/gcube/dataanalysis/wps/mapper/ClassGenerator.java
new file mode 100644
index 0000000..aae1fea
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/wps/mapper/ClassGenerator.java
@@ -0,0 +1,183 @@
+package org.gcube.dataanalysis.wps.mapper;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.List;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
+import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
+import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
+import org.gcube.dataanalysis.ecoengine.interfaces.Model;
+import org.gcube.dataanalysis.ecoengine.processing.factories.ProcessorsFactory;
+import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.StatisticalTypeToWPSType;
+
+public class ClassGenerator {
+
+ private String configPath = "./cfg/";
+ private String generationPath = "./src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/";
+ private StatisticalTypeToWPSType converter;
+
+ private String packageString = null;
+ private String javaFileName = null;
+
+ public ClassGenerator(String algorithmName, String implementation, String generationPath, String configPath) throws Exception {
+ this.generationPath = generationPath;
+ this.configPath = configPath;
+ converter = new StatisticalTypeToWPSType();
+ this.javaFileName = generateEcologicalEngineClasses(algorithmName, implementation);
+ }
+
+ public String getPackageString() {
+ return packageString;
+ }
+
+
+
+ public String getJavaFileName() {
+ return javaFileName;
+ }
+
+ private String generateEcologicalEngineClasses(String algorithmName, String implementation) throws Exception {
+ AlgorithmConfiguration config = new AlgorithmConfiguration();
+ config.setConfigPath(configPath);
+ config.setAlgorithmClassLoader(Thread.currentThread().getContextClassLoader());
+ // set scope etc..
+ HashMap<String, List<String>> algorithms = ProcessorsFactory.getAllFeatures(config);
+ for (String algorithmSet : algorithms.keySet()) {
+ List<String> parametersList = algorithms.get(algorithmSet);
+ System.out.println(algorithmSet + ":" + parametersList.toString());
+ for (String algorithm : parametersList) {
+
+ if (!algorithm.equals(algorithmName))
+ continue;
+
+ // got an algorithm
+ System.out.println("Algorithm: " + algorithm);
+ String description = ""; // get this information
+ StringBuffer classWriter = new StringBuffer();
+ List<StatisticalType> inputs = null;
+ StatisticalType outputs = null;
+ // build class preamble
+ config.setAgent(algorithm);
+ config.setModel(algorithm);
+ config.setAlgorithmClassLoader(Thread.currentThread().getContextClassLoader());
+ String interfaceString = "";
+ Object impl = Class.forName(implementation).newInstance();
+ if ((impl instanceof GenericAlgorithm))
+ {
+ GenericAlgorithm ga = (GenericAlgorithm)impl;
+ inputs = ga.getInputParameters();
+ description = ga.getDescription();
+ try
+ {
+ outputs = ga.getOutput();
+ }
+ catch (Exception e)
+ {
+ outputs = null;
+ }
+ }
+ else if (impl instanceof Model){
+ Model ca = (Model)impl;
+ inputs = ca.getInputParameters();
+ description = ca.getDescription();
+ try
+ {
+ outputs = ca.getOutput();
+ }
+ catch (Exception e)
+ {
+ outputs = null;
+ }
+ }
+ else if ((impl instanceof ComputationalAgent))
+ {
+ ComputationalAgent ca = (ComputationalAgent)impl;
+ inputs = ca.getInputParameters();
+ description = ca.getDescription();
+ try
+ {
+ outputs = ca.getOutput();
+ }
+ catch (Exception e)
+ {
+ outputs = null;
+ }
+ }
+ else
+ {
+ throw new Exception("invalid algorithm class "+impl.getClass());
+ }
+ try{
+ if (algorithmSet.equals("DISTRIBUTIONS")) {
+ packageString = "generators";
+ interfaceString = "IGenerator";
+ } else if (algorithmSet.equals("TRANSDUCERS")) {
+ packageString = "transducerers";
+ interfaceString = "ITransducer";
+ } else if (algorithmSet.equals("MODELS")) {
+ packageString = "modellers";
+ interfaceString = "IModeller";
+ } else if (algorithmSet.equals("CLUSTERERS")) {
+ packageString = "clusterers";
+ interfaceString = "IClusterer";
+ } else if (algorithmSet.equals("TEMPORAL_ANALYSIS")) {
+
+ } else if (algorithmSet.equals("EVALUATORS")) {
+ packageString = "evaluators";
+ interfaceString = "IEvaluator";
+ }
+ }catch(Exception e){
+ System.out.println("Error in retrieving output:");
+ e.printStackTrace();
+ }
+ classWriter.append(((String) StatisticalTypeToWPSType.templates.get("package")).replace("#PACKAGE#", packageString) + "\n" + ((String) StatisticalTypeToWPSType.templates.get("import")) + "\n");
+ System.out.println("Class preamble: \n" + classWriter.toString());
+
+ // build class description
+ String classdescription = (String) StatisticalTypeToWPSType.templates.get("description");
+ //modification of 20/07/15
+ classdescription = classdescription.replace("#TITLE#", algorithm).replace("#ABSTRACT#", description).replace("#CLASSNAME#", algorithm).replace("#PACKAGE#", packageString);
+ System.out.println("Class description : \n" + classdescription);
+ String classdefinition = (String) StatisticalTypeToWPSType.templates.get("class_definition");
+ classdefinition = classdefinition.replace("#CLASSNAME#", algorithm).replace("#INTERFACE#", interfaceString);
+ System.out.println("Class definition: \n" + classdefinition);
+ classWriter.append(classdescription + "\n");
+ classWriter.append(classdefinition + "\n");
+ // attach scope input deprecated!
+ // classWriter.append((String) StatisticalTypeToWPSType.templates.get("scopeInput") + "\n");
+ // classWriter.append((String) StatisticalTypeToWPSType.templates.get("usernameInput") + "\n");
+ for (StatisticalType input : inputs) {
+ System.out.println(input);
+ String wpsInput = converter.convert2WPSType(input, true, config);
+ if (wpsInput != null) {
+ classWriter.append(wpsInput + "\n");
+ System.out.println("Input:\n" + wpsInput);
+ }
+ }
+ if (outputs != null) {
+ System.out.println("Alg. Output:\n" + outputs);
+ String wpsOutput = converter.convert2WPSType(outputs, false, config);
+ classWriter.append(wpsOutput + "\n");
+ System.out.println("Output:\n" + wpsOutput);
+ }
+ else
+ System.out.println("Output is empty!");
+ // add potential outputs
+ classWriter.append((String) StatisticalTypeToWPSType.templates.getProperty("optionalOutput") + "\n");
+ classWriter.append((String) StatisticalTypeToWPSType.templates.get("class_closure"));
+
+ System.out.println("Class:\n" + classWriter.toString());
+ System.out.println("Saving...");
+ File dirs = new File(generationPath + packageString);
+ if (!dirs.exists()) dirs.mkdirs();
+ FileTools.saveString(generationPath + packageString+"/"+algorithm + ".java", classWriter.toString(), true, "UTF-8");
+ return generationPath + packageString+"/"+algorithm + ".java";
+ }
+ }
+ return null;
+ }
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/wps/mapper/DataMinerUpdater.java b/src/main/java/org/gcube/dataanalysis/wps/mapper/DataMinerUpdater.java
new file mode 100644
index 0000000..e1a4ef5
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/wps/mapper/DataMinerUpdater.java
@@ -0,0 +1,555 @@
+package org.gcube.dataanalysis.wps.mapper;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.lang.reflect.Method;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.net.URLConnection;
+import java.util.Arrays;
+import java.util.Base64;
+import java.util.List;
+import java.util.UUID;
+import java.util.jar.Attributes;
+import java.util.jar.JarEntry;
+import java.util.jar.JarOutputStream;
+import java.util.jar.Manifest;
+
+import javax.tools.Diagnostic;
+import javax.tools.Diagnostic.Kind;
+import javax.tools.DiagnosticCollector;
+import javax.tools.JavaCompiler;
+import javax.tools.JavaFileObject;
+import javax.tools.StandardJavaFileManager;
+import javax.tools.ToolProvider;
+
+import org.gcube.common.resources.gcore.GenericResource;
+import org.gcube.common.scope.api.ScopeProvider;
+import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
+import org.gcube.informationsystem.publisher.RegistryPublisher;
+import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
+import org.w3c.dom.Document;
+import org.w3c.dom.Node;
+import org.w3c.dom.ls.DOMImplementationLS;
+import org.w3c.dom.ls.LSSerializer;
+
+public class DataMinerUpdater {
+
+ // Example of call with modifications for private users
+ // ./addAlgorithm.sh NETCDF_SUPPORT_JAVA_13
+ // BLACK_BOX-[gianpaolo.coro,giancarlo.panichi]
+ // org.gcube.dataanalysis.executor.rscripts.NetCDFSupportJava
+ // /gcube/devNext/NextNext transducerers N
+ // http://data-d.d4science.org/ZGZSUHlQODRZY3pBQVZxNlVOK0xzUVZKc0dZczBKaEpHbWJQNStIS0N6Yz0
+ // "NetCDF Support Java" dev/software/
+ public static void main(String args[]) throws Exception {
+ String algorithmName = null;
+ String implementation = null;
+ String category = null;
+ String configPath = null;
+ String libPath = null;
+ String scope = null;
+ String description = null;
+ String atype = null;
+ String skipJava = null;
+ String url = null;
+ String privateusers = null;
+
+ int i = 0;
+ for (String arg : args) {
+ if (arg.startsWith("-c")) {
+ configPath = arg.substring(2);
+ }
+ if (arg.startsWith("-i")) {
+ implementation = arg.substring(2);
+ }
+ if (arg.startsWith("-a")) {
+ algorithmName = arg.substring(2);
+ }
+ if (arg.startsWith("-t")) {
+ category = arg.substring(2);
+ int idxprivusers = category.indexOf("-[");
+ if (idxprivusers > 1) {
+ privateusers = category.substring(idxprivusers + 2, category.length() - 1);
+ System.out.println("Found private Users " + privateusers);
+ }
+ }
+ if (arg.startsWith("-l")) {
+ libPath = arg.substring(2);
+ }
+ if (arg.startsWith("-s")) {
+ scope = arg.substring(2);
+ }
+
+ if (arg.startsWith("-e")) {
+ atype = arg.substring(2);
+ }
+
+ if (arg.startsWith("-k")) {
+ skipJava = arg.substring(2);
+ if (skipJava.equals("Y"))
+ skipJava = null;
+ }
+
+ if (arg.startsWith("-u")) {
+ url = arg.substring(2);
+ }
+
+ if (arg.startsWith("-d")) {
+ description = arg.substring(2);
+
+ for (int j = i + 1; j < args.length; j++) {
+ description = description + " " + args[j];
+ }
+ System.out.println("DESCRIPTION " + description);
+ break;
+ }
+
+ i++;
+ }
+
+ if (libPath == null || category == null || implementation == null || configPath == null || algorithmName == null || scope == null) {
+ System.out.println("Wrong parameters");
+ System.out.println("Usage example: ");
+ System.exit(0);
+ }
+
+ Update(algorithmName, implementation, category, configPath, libPath, scope, description, atype, skipJava, url, privateusers);
+ }
+
+ public static void UpdateFiles(String configPath, String atype, String algorithmName, String implementation, String category) throws Exception {
+ File transducers = new File(configPath, atype + ".properties");
+ File userpersp = new File(configPath, "userperspective.properties");
+
+ // UPDATING TRANDUCERERS FILES
+ System.out.println("*******Modifying transducerer file");
+ BufferedReader br = new BufferedReader(new FileReader(transducers));
+ String entry = algorithmName + "=" + implementation;
+ String line = br.readLine();
+ boolean found = false;
+ boolean endline = false;
+ String lastline = "";
+ StringBuffer sbb = new StringBuffer();
+ while (line != null) {
+ lastline = line;
+ if (line.trim().equalsIgnoreCase(entry)) {
+ found = true;
+ break;
+ }
+ sbb.append(line.trim() + System.lineSeparator());
+ line = br.readLine();
+ }
+ br.close();
+
+ if (!found) {
+
+ FileWriter fw = new FileWriter(transducers, false);
+ sbb.append(entry + System.lineSeparator());
+ fw.write(sbb.toString());
+
+ fw.close();
+ System.out.println("*******Modified transducerer file");
+ } else
+ System.out.println("*******Transducerer file was not modified, since it already contains the algorithm");
+
+ // UPDATING USER PERSPECTIVE FILE
+ System.out.println("*******Modifying user perspective file");
+ br = new BufferedReader(new FileReader(userpersp));
+ line = br.readLine();
+ found = false;
+ StringBuffer sb = new StringBuffer();
+ boolean foundCategory = false;
+ while (line != null) {
+ String cat = line.substring(0, line.indexOf("="));
+ if (cat.equalsIgnoreCase(category)) {
+ foundCategory = true;
+ String arguments = line.substring(line.indexOf("=") + 1).trim();
+ String argums[] = arguments.split(",");
+ List<String> valid = Arrays.asList(argums);
+
+ // if (!line.contains(algorithmName)){
+ if (!valid.contains(algorithmName)) {
+ if (line.substring(line.indexOf("=") + 1).trim().length() == 0)
+ line = line + algorithmName;
+ else
+ line = line + "," + algorithmName;
+ } else
+ found = true;
+ }
+ if (line.trim().length() > 0)
+ sb.append(line + System.lineSeparator());
+ line = br.readLine();
+ }
+ br.close();
+
+ if (!foundCategory) {
+ sb.append(category + "=" + algorithmName + System.lineSeparator());
+ FileWriter fw = new FileWriter(userpersp, false);
+ fw.write(sb.toString());
+ fw.close();
+ System.out.println("*******Modified user perspective file");
+ } else {
+ if (!found) {
+
+ FileWriter fw = new FileWriter(userpersp, false);
+ fw.write(sb.toString());
+ fw.close();
+ System.out.println("*******Modified user perspective file");
+ } else
+ System.out.println("*******Perspective file was not modified, since it already contains the algorithm");
+ }
+
+ if (atype.equals("models"))
+ atype = "modellers";
+ if (atype.equals("nodealgorithms"))
+ atype = "generators";
+ if (atype.equals("algorithms"))
+ atype = "generators";
+
+ }
+
+ public static void Update(String algorithmName, String implementation, String category, String configPath, String applicationlibs, String scope, String description, String atype, String skipJava, String url, String privateusers) throws Exception {
+ BufferedReader br;
+ String line;
+ if (atype == null || atype.trim().length() == 0)
+ atype = "transducerers";
+
+ System.out.println("*****Parameters");
+ System.out.println("*****algorithmName:" + algorithmName);
+ System.out.println("*****implementation:" + implementation);
+ System.out.println("*****category:" + category);
+ System.out.println("*****configPath:" + configPath);
+ System.out.println("*****applicationlibs:" + applicationlibs);
+ System.out.println("*****scope:" + scope);
+ System.out.println("*****description:" + description);
+ System.out.println("*****atype:" + atype);
+ System.out.println("*****skipJava:" + ((skipJava == null) ? true : false));
+ System.out.println("*****url:" + url);
+
+ System.out.println("*******1 - Downloading file");
+
+ if (url != null && url.length() > 1 && skipJava != null) {
+ File jarfile = new File(applicationlibs, algorithmName + ".jar");
+ System.out.println("*******Downloading to " + jarfile.getAbsolutePath());
+ downloadFromUrl(url, jarfile.getAbsolutePath());
+ System.out.println("*******Download OK - check " + jarfile.exists());
+ System.out.println("*******Updating classpath");
+ // load the jar into the classpath
+ URLClassLoader sysloader = (URLClassLoader) Thread.currentThread().getContextClassLoader();
+ Class<URLClassLoader> sysclass = URLClassLoader.class;
+ Class<?>[] parameters = new Class<?>[] { URL.class };
+
+ try {
+ Method method = sysclass.getDeclaredMethod("addURL", parameters);
+ method.setAccessible(true);
+ method.invoke(sysloader, new Object[] { jarfile.toURI().toURL() });
+ } catch (Throwable t) {
+ t.printStackTrace();
+ throw new IOException("Error, could not add URL to system classloader");
+ }// end try catch
+
+ } else
+ System.out.println("*******1 - Nothing to download");
+
+ System.out.println("*******2 - Updating files");
+ UpdateFiles(configPath, atype, algorithmName, implementation, category);
+ if (atype.equals("nodealgorithms")) {
+ System.out.println("*******2 - Updating also algorithms file");
+ UpdateFiles(configPath, "algorithms", algorithmName, implementation, category);
+ }
+ System.out.println("*******2 - Files updated!");
+
+ // GENERATING CLASS
+
+ if (skipJava != null) {
+
+ System.out.println("*******3 - Generating classes");
+ String generationPath = "./org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/";
+ ClassGenerator classGenerator = new ClassGenerator(algorithmName, implementation, generationPath, configPath);
+ System.out.println("*******3 - Classes generated! " + classGenerator.getJavaFileName());
+
+ // PREPARING JAR FILE
+ File dataminerjar = new File(applicationlibs, algorithmName + "_interface.jar");
+ dataminerjar.delete();
+
+ // COMPILING JAR
+ System.out.println("*******4 - Compiling the Jar");
+
+ compileJava(classGenerator.getJavaFileName());
+
+ System.out.println("*******->creating jar");
+
+ createJar(dataminerjar, classGenerator.getPackageString(), algorithmName);
+
+ // command(classes);
+ // command(createjar);
+ System.out.println("*******Size " + dataminerjar.length());
+ System.out.println("*******4 - Jar compiled!! " + dataminerjar);
+
+ } else {
+ System.out.println("*******3 - Generating classes skipped");
+ System.out.println("*******4 - Compiling the Jar skipped");
+
+ }
+
+ // INDEX ON THE IS
+ System.out.println("*******5 - Indexing on the IS");
+ System.out.println("Indexing on IS in scope " + scope);
+ indexOnIS(algorithmName, description, scope, privateusers);
+ System.out.println("*******5 - Finished indexing on the IS");
+
+ System.out.println("*******All done!");
+ System.out.println("*******Table entry:");
+ System.out.println("| " + algorithmName + " | " + "system" + " | " + category + "| Prod | ./addAlgorithm.sh " + algorithmName + " " + category + " " + implementation + " " + scope + " " + atype + " " + (skipJava != null ? "N" : "Y") + " " + ((url != null && url.length() > 1) ? url : "k") + " \"" + description + "\"" + " | none |");
+
+ }
+
+ private static void compileJava(String javaFileName) throws Exception {
+ JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
+ StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null);
+ Iterable<? extends JavaFileObject> compilationUnits1 = fileManager.getJavaFileObjectsFromFiles(Arrays.asList(new File(javaFileName)));
+
+ DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<JavaFileObject>();
+
+ boolean success = compiler.getTask(null, fileManager, diagnostics, null, null, compilationUnits1).call();
+
+ for (Diagnostic<? extends JavaFileObject> diagnostic : diagnostics.getDiagnostics())
+ if (diagnostic.getKind() == Kind.ERROR)
+ System.out.format("Error on line %d in %s%n", diagnostic.getLineNumber(), diagnostic.getSource().toUri());
+
+ if (!success)
+ throw new Exception("error compiling generated class");
+
+ }
+
+ private static void createJar(File dataminerjar, String packageString, String algorithmName) throws Exception {
+ Manifest manifest = new Manifest();
+ manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
+ try (JarOutputStream target = new JarOutputStream(new FileOutputStream(dataminerjar), manifest)) {
+ target.putNextEntry(new JarEntry("org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/" + packageString + "/"));
+ String source = "org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/" + packageString + "/" + algorithmName + ".class";
+ File fileSource = new File(source);
+ JarEntry entry = new JarEntry(source);
+ target.putNextEntry(entry);
+ try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(fileSource));) {
+ byte[] buffer = new byte[1024];
+ while (true) {
+ int count = in.read(buffer);
+ if (count == -1)
+ break;
+ target.write(buffer, 0, count);
+ }
+ }
+ target.closeEntry();
+ target.close();
+ }
+ }
+
+ public static String ExecuteGetLineOld(String cmd) {
+
+ Process process = null;
+ String lastline = "";
+ StringBuffer sb = new StringBuffer();
+ try {
+ System.out.println("ExecuteScript-> OSCommand-> Executing Control ->" + cmd);
+
+ process = Runtime.getRuntime().exec(cmd);
+
+ BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream()));
+ String line = br.readLine();
+ System.out.println("ExecuteScript-> OSCommand-> line->" + line);
+
+ while (line != null) {
+ try {
+ lastline = line;
+ System.out.println("ExecuteScript-> OSCommand-> line->" + line);
+ line = br.readLine();
+ if (line != null)
+ sb.append(line + System.lineSeparator());
+ } catch (EOFException e) {
+ System.out.println("ExecuteScript-> OSCommand -> Process Finished with EOF");
+ break;
+ } catch (Exception e) {
+ line = "ERROR";
+ break;
+ }
+ }
+
+ System.out.println("ExecuteScript-> OSCommand -> Process Finished");
+
+ } catch (Throwable e) {
+ System.out.println("ExecuteScript-> OSCommand-> error ");
+ e.printStackTrace();
+ lastline = "ERROR";
+ }
+ process.destroy();
+ System.out.println("ExecuteScript-> OSCommand-> Process destroyed ");
+ return sb.toString();
+ }
+
+ public static void indexOnIS(String algorithmName, String algorithmDescription, String scope, String privateusers) throws Exception {
+ System.out.println("setting scope to " + scope);
+
+ String secondaryTypePublic = "StatisticalManagerAlgorithm";
+ String secondaryTypePrivate = "StatisticalManagerAlgorithmPrivate";
+ String secondaryType = secondaryTypePublic;
+ if (privateusers != null)
+ secondaryType = secondaryTypePrivate;
+
+ InfrastructureDialoguer id = new InfrastructureDialoguer(scope);
+ if (privateusers != null) {
+ System.out.println("Transforming Algorithm " + algorithmName + " into private algorithm");
+ ScopeProvider.instance.set(scope);
+ System.out.println("Deleting previous algorithm " + algorithmName + " from private algorithms");
+ id.deleteAlgorithmInScope(algorithmName, secondaryTypePrivate);
+ System.out.println("Deleting previous algorithm " + algorithmName + " from public algorithms");
+ id.deleteAlgorithmInScope(algorithmName, secondaryTypePublic);
+ } else {
+ ScopeProvider.instance.set(scope);
+ List<String> algorithms = id.getAlgorithmsInScope(secondaryType);
+ System.out.println("Deleting previous algorithm " + algorithmName + " from private algorithms");
+ id.deleteAlgorithmInScope(algorithmName, secondaryTypePrivate);
+ boolean found = false;
+ for (String alg : algorithms) {
+ System.out.println("Algorithm in scope " + alg);
+ if (alg.equals(algorithmName)) {
+ System.out.println("Found Match! ");
+ found = true;
+ break;
+ }
+ }
+
+ if (found) {
+ return;
+ }
+ }
+
+ String xml = FileTools.loadString("algorithmTemplate", "UTF-8");
+
+ xml = xml.replace("#UUID#", UUID.randomUUID().toString());
+ xml = xml.replace("#SCOPE#", scope);
+ xml = xml.replace("#NAME#", algorithmName);
+ xml = xml.replace("#DESCRIPTION#", algorithmDescription);
+
+ // patch to add private users property - GP
+ if (privateusers != null) {
+ ScopeProvider.instance.set(scope);
+ /* encryption using d4science */
+ /*
+ * InputStream privateusersstream = new
+ * ByteArrayInputStream(privateusers
+ * .getBytes(StandardCharsets.UTF_8.name())); ByteArrayOutputStream
+ * baos = new ByteArrayOutputStream(); new
+ * EncryptionUtil().encrypt(privateusersstream, baos); String
+ * privateusersencr = new String( baos.toByteArray());
+ */
+ String privateuserencr = privateusers;//encrypt(privateusers);
+
+ xml = xml.replace("<privateusers></privateusers>", "<privateusers>" + privateuserencr + "</privateusers>");
+ }
+
+ xml = xml.trim();
+ System.out.println("XML:" + xml);
+
+ ScopeProvider.instance.set(scope);
+ GenericResource toPublish = new GenericResource();
+
+ Document document = toPublish.newProfile().description(algorithmDescription.replace("\"", "")).name(algorithmName).type(secondaryType).newBody().getOwnerDocument();
+
+ toPublish.profile().newBody(xml);
+ Node n = toPublish.profile().body();
+ DOMImplementationLS lsImpl = (DOMImplementationLS) n.getOwnerDocument().getImplementation().getFeature("LS", "3.0");
+ LSSerializer serializer = lsImpl.createLSSerializer();
+ serializer.getDomConfig().setParameter("xml-declaration", false); // by
+ // default
+ // its
+ // true,
+ // so
+ // set
+ // it
+ // to
+ // false
+ // to
+ // get
+ // String
+ // without
+ // xml-declaration
+ String str = serializer.writeToString(n);
+
+ System.out.println("STRING:" + str);
+
+ System.out.println(toPublish.profile().body());
+ RegistryPublisher rp = RegistryPublisherFactory.create();
+
+ toPublish = rp.create(toPublish);
+ System.out.println("PUBLISHED");
+ }
+
+ public static String encrypt(String text) {
+ return new String(Base64.getEncoder().encode(xor(text.getBytes())));
+ }
+
+ public static String decrypt(String hash) {
+ try {
+ return new String(xor(Base64.getDecoder().decode(hash.getBytes())), "UTF-8");
+ } catch (java.io.UnsupportedEncodingException ex) {
+ throw new IllegalStateException(ex);
+ }
+ }
+
+ private static byte[] xor(final byte[] input) {
+ final byte[] output = new byte[input.length];
+ final byte[] secret = "dminstall".getBytes();
+ int spos = 0;
+ for (int pos = 0; pos < input.length; ++pos) {
+ output[pos] = (byte) (input[pos] ^ secret[spos]);
+ spos += 1;
+ if (spos >= secret.length) {
+ spos = 0;
+ }
+ }
+ return output;
+ }
+
+ static void downloadFromUrl(String urlString, String localFilename) throws IOException {
+ InputStream is = null;
+ FileOutputStream fos = null;
+ System.out.println("Downloading :" + urlString);
+ URL url = new URL(urlString);
+ try {
+ URLConnection urlConn = url.openConnection();// connect
+
+ is = urlConn.getInputStream(); // get connection inputstream
+ fos = new FileOutputStream(localFilename); // open outputstream to
+ // local file
+
+ byte[] buffer = new byte[4096]; // declare 4KB buffer
+ int len;
+
+ // while we have availble data, continue downloading and storing to
+ // local file
+ while ((len = is.read(buffer)) > 0) {
+ fos.write(buffer, 0, len);
+ }
+ } finally {
+ try {
+ if (is != null) {
+ is.close();
+ }
+ } finally {
+ if (fos != null) {
+ fos.close();
+ }
+ }
+ }
+ }
+}
diff --git a/src/main/java/org/gcube/dataanalysis/wps/mapper/InfrastructureDialoguer.java b/src/main/java/org/gcube/dataanalysis/wps/mapper/InfrastructureDialoguer.java
new file mode 100644
index 0000000..bb96f9c
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/wps/mapper/InfrastructureDialoguer.java
@@ -0,0 +1,114 @@
+package org.gcube.dataanalysis.wps.mapper;
+
+
+import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
+import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.gcube.common.encryption.StringEncrypter;
+import org.gcube.common.resources.gcore.GenericResource;
+import org.gcube.common.resources.gcore.ServiceEndpoint;
+import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
+import org.gcube.common.scope.api.ScopeProvider;
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.DatabaseInfo;
+import org.gcube.informationsystem.publisher.RegistryPublisher;
+import org.gcube.informationsystem.publisher.RegistryPublisherFactory;
+import org.gcube.resources.discovery.client.api.DiscoveryClient;
+import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
+
+public class InfrastructureDialoguer {
+ public String scope;
+
+ public InfrastructureDialoguer(String scope){
+
+ this.scope = scope;
+ }
+
+ public DatabaseInfo getDatabaseInfo(String resourceName) throws Exception{
+ DatabaseInfo dbi = new DatabaseInfo();
+ AnalysisLogger.getLogger().debug("Searching for Database "+resourceName+" in scope "+scope);
+ SimpleQuery query = queryFor(ServiceEndpoint.class);
+// query.addCondition("$resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'StatisticalManagerDataBase' ");
+// query.addCondition("$resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq '"+resourceName+"' ");
+ query.addCondition("$resource/Profile/Name eq '"+resourceName+"' ");
+ DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
+ List<ServiceEndpoint> resources = client.submit(query);
+ if (resources==null || resources.size()==0){
+ throw new Exception("No resource named "+resourceName+" available in scope "+scope);
+ }
+ else{
+ AccessPoint ap = resources.get(0).profile().accessPoints().iterator().next();
+ dbi.url = ap.address();
+ dbi.username = ap.username();
+ dbi.password = StringEncrypter.getEncrypter().decrypt(ap.password().trim());
+
+ for (ServiceEndpoint.Property property:ap.properties()){
+ if (property.name().equalsIgnoreCase("driver"))
+ dbi.driver = property.value();
+ }
+
+ AnalysisLogger.getLogger().debug("Found Database : "+dbi);
+ }
+
+ if (dbi.url == null)
+ throw new Exception("No database URL for resource "+resourceName+" available in scope "+scope);
+ return dbi;
+
+ }
+
+ public void deleteAlgorithmInScope(String algorithmName, String secondaryType) throws Exception{
+ System.out.println("Searching for Algorithms in scope "+scope);
+ SimpleQuery query = queryFor(GenericResource.class);
+ query.addCondition("$resource/Profile/SecondaryType eq '"+secondaryType+"' ");
+
+ DiscoveryClient<GenericResource> client = clientFor(GenericResource.class);
+ List<GenericResource> resources = client.submit(query);
+ if (resources==null || resources.size()==0){
+ System.out.println("No resource named "+secondaryType+"/"+algorithmName+" available in scope "+scope);
+ return;
+ }
+ System.out.println("Found "+resources.size()+" resources");
+ for (GenericResource resource: resources){
+ if (resource.profile().name().equals(algorithmName)){
+ System.out.println("Removing Algorithm "+algorithmName);
+ RegistryPublisher rp = RegistryPublisherFactory.create();
+ rp.remove(resource);
+ }
+ }
+ }
+
+
+ public List<String> getAlgorithmsInScope(String secondaryType) throws Exception{
+ AnalysisLogger.getLogger().debug("Searching for Algorithms in scope "+scope);
+ SimpleQuery query = queryFor(GenericResource.class);
+ query.addCondition("$resource/Profile/SecondaryType eq '"+secondaryType+"' ");
+
+ DiscoveryClient<GenericResource> client = clientFor(GenericResource.class);
+ List<GenericResource> resources = client.submit(query);
+ if (resources==null || resources.size()==0){
+ System.out.println("WARNING: No resource named StatisticalManagerAlgorithm available in scope "+scope);
+ return new ArrayList<String>();
+ }
+ List<String> resourcesNames = new ArrayList<String>();
+ AnalysisLogger.getLogger().debug("Found "+resources.size()+" resources");
+ for (GenericResource resource: resources){
+ resourcesNames.add(resource.profile().name());
+ }
+ return resourcesNames;
+
+ }
+
+ public static void main(String[] args) throws Exception{
+ AnalysisLogger.setLogger("cfg/"
+ + AlgorithmConfiguration.defaultLoggerFile);
+ InfrastructureDialoguer dialoguer = new InfrastructureDialoguer("/gcube/devsec/devVRE");
+// dialoguer.getDatabaseInfo("StatisticalManagerDataBase");
+ dialoguer.getDatabaseInfo("FishBase");
+
+ }
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/wps/mapper/WpsxmlGenerator.java b/src/main/java/org/gcube/dataanalysis/wps/mapper/WpsxmlGenerator.java
new file mode 100644
index 0000000..f87a112
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/wps/mapper/WpsxmlGenerator.java
@@ -0,0 +1,48 @@
+package org.gcube.dataanalysis.wps.mapper;
+
+import java.io.File;
+import java.io.FileReader;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Properties;
+
+
+// Developer utility: reads an algorithm->class properties file plus a
+// category ("user perspective") properties file and prints one
+// "./addAlgorithm.sh ..." installer command line per algorithm, sorted by name.
+// Paths are hard-coded to a developer workstation; intended to be edited and
+// run manually, not used as a library.
+public class WpsxmlGenerator {
+
+ public static void main (String[] args) throws Exception{
+// String transducerers = "C:/Users/coro/Desktop/WorkFolder/Workspace/EcologicalEngineConfiguration/cfg/transducerers.properties";
+// String transducerers = "C:/Users/coro/Desktop/WorkFolder/Workspace/EcologicalEngineConfiguration/cfg/models.properties";
+ String transducerers = "C:/Users/coro/Desktop/WorkFolder/Workspace/EcologicalEngineConfiguration/cfg/nodealgorithms.properties";
+ String userp = "C:/Users/coro/Desktop/WorkFolder/Workspace/EcologicalEngineConfiguration/cfg/userperspective.properties";
+ String atype = "nodealgorithms";
+ Properties props = new Properties();
+ props.load(new FileReader(new File(transducerers)));
+
+ Properties persp= new Properties();
+ persp.load(new FileReader(new File(userp)));
+ // sort algorithm names alphabetically for stable, readable output
+ Collection unsorted = (Collection)props.keySet();
+
+ List list = new ArrayList(unsorted);
+ java.util.Collections.sort(list);
+
+ for (Object algorithm:list)
+ {
+ String classname = (String) props.getProperty((String)algorithm);
+ // default category when the algorithm is not listed in any perspective entry
+ String found = "OTHER";
+ for (Object category:persp.keySet()){
+ String algorithms = persp.getProperty((String)category);
+ if (algorithms.contains((String)algorithm)){
+ found = (String)category;
+ break;
+ }
+ }
+
+ String addAlgorithm = "./addAlgorithm.sh "+((String)algorithm).trim()+" " +found+" "+classname+" /gcube/devsec "+atype+" Y "+"a test algorithm for the alg publisher";
+ System.out.println(addAlgorithm);
+ //System.out.println("org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers."+algorithm+"");
+ }
+ }
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/wps/remote/GetCapabilitiesChecker.java b/src/main/java/org/gcube/dataanalysis/wps/remote/GetCapabilitiesChecker.java
new file mode 100644
index 0000000..8c2e810
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/wps/remote/GetCapabilitiesChecker.java
@@ -0,0 +1,343 @@
+package org.gcube.dataanalysis.wps.remote;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.net.URL;
+import java.net.URLConnection;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.List;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+
+import org.apache.http.HttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.params.HttpConnectionParams;
+import org.apache.http.params.HttpParams;
+import org.apache.http.util.EntityUtils;
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+
+public class GetCapabilitiesChecker {
+
+ private static final char DOT = '.';
+
+ private static final char SLASH = '/';
+
+ private static final String CLASS_SUFFIX = ".class";
+
+ private static final String BAD_PACKAGE_ERROR = "Unable to get resources from path '%s'. Are you sure the package '%s' exists?";
+
+ // Scans sibling jars of the jar that contains packageName (specifically
+ // "dataminer-algorithms.jar" and any "*_interface.jar") and loads every
+ // .class entry under that package, returning the loaded classes.
+ // Classes that fail to load are silently skipped. The JarFile is closed
+ // in the finally block.
+ // NOTE(review): generics are stripped in this diff rendering (List> was
+ // presumably List<Class<?>>) — confirm against the original source.
+ public static List> getClassesInSamePackageFromJar(String packageName) throws Exception {
+
+ String scannedPath = packageName.replace(".", "/");
+ URL scannedUrl = Thread.currentThread().getContextClassLoader().getResource(scannedPath);
+ String jarPath = scannedUrl.getFile();
+
+ AnalysisLogger.getLogger().debug("Jar Path complete: " + jarPath);
+ // extract the filesystem path from a "jar:file:/...!/..." style URL
+ jarPath = jarPath.substring(jarPath.indexOf("file:/") + 6, jarPath.lastIndexOf("!"));
+ // restore the leading slash lost by the substring above on Linux paths
+ if (jarPath.startsWith("home"))
+ jarPath = "/" + jarPath;
+ AnalysisLogger.getLogger().debug("Jar Path: " + jarPath);
+
+ JarFile jarFile = null;
+ List> result = new ArrayList>();
+
+ String pathTojars = new File(jarPath).getParent();
+
+ File[] jars = new File(pathTojars).listFiles();
+ try {
+
+ for (File jar : jars) {
+ // File otherjar = new File(new File(jarPath).getParent(),"dataminer-algorithms.jar");
+
+ if (jar.getName().equals("dataminer-algorithms.jar") || jar.getName().endsWith("_interface.jar")) {
+
+ //File otherjar = new File(new File(jarPath).getParent(), "dataminer-algorithms.jar");
+ File otherjar = jar;
+ if (otherjar.exists())
+ jarPath = otherjar.getAbsolutePath();
+
+ AnalysisLogger.getLogger().debug("Alternative Jar Path: " + jarPath);
+
+ jarFile = new JarFile(jarPath);
+ Enumeration en = jarFile.entries();
+
+ while (en.hasMoreElements()) {
+ JarEntry entry = en.nextElement();
+ String entryName = entry.getName();
+ packageName = packageName.replace('.', '/');
+
+ if (entryName != null && entryName.endsWith(".class") && entryName.startsWith(packageName)) {
+ try {
+ // strip ".class" (6 chars) and convert the entry path to a FQCN
+ Class entryClass = Class.forName(entryName.substring(0, entryName.length() - 6).replace('/', '.'));
+
+ if (entryClass != null) {
+ result.add(entryClass);
+ }
+ } catch (Throwable e) {
+ // do nothing, just continue processing classes
+ }
+ }
+ }// while
+
+ }// if jar known
+ }
+ return result;
+ } catch (Exception e) {
+ throw e;
+ } finally {
+ try {
+ if (jarFile != null) {
+ jarFile.close();
+ }
+
+ } catch (Exception e) {
+ // best-effort close: a failure here must not mask the primary result/exception
+ }
+ }
+ }
+
+ // Recursively collects all classes under the given package by walking the
+ // package directory on the classpath (exploded-classes layout, not jars).
+ // Throws IllegalArgumentException when the package cannot be resolved.
+ public static List> find(String scannedPackage) {
+ String scannedPath = scannedPackage.replace(DOT, SLASH);
+ URL scannedUrl = Thread.currentThread().getContextClassLoader().getResource(scannedPath);
+ if (scannedUrl == null) {
+ throw new IllegalArgumentException(String.format(BAD_PACKAGE_ERROR, scannedPath, scannedPackage));
+ }
+ File scannedDir = new File(scannedUrl.getFile());
+ System.out.println("scannedDir:" + scannedDir);
+ System.out.println("scannedUrl:" + scannedUrl);
+ System.out.println("scannedUrl List:" + scannedDir.listFiles());
+ List> classes = new ArrayList>();
+ for (File file : scannedDir.listFiles()) {
+ classes.addAll(find(file, scannedPackage));
+ }
+ return classes;
+ }
+
+ // Recursive helper for find(String): descends into directories and loads
+ // each ".class" file as a class, skipping the I* algorithm marker interfaces
+ // (IClusterer/IEvaluator/IGenerator/IModeller/ITransducer) and any class
+ // that cannot be resolved.
+ private static List> find(File file, String scannedPackage) {
+ List> classes = new ArrayList>();
+ String resource = scannedPackage + DOT + file.getName();
+ if (file.isDirectory()) {
+ for (File child : file.listFiles()) {
+ classes.addAll(find(child, resource));
+ }
+ } else if (resource.endsWith(CLASS_SUFFIX)) {
+ int endIndex = resource.length() - CLASS_SUFFIX.length();
+ String className = resource.substring(0, endIndex);
+ try {
+ if (!(className.contains("IClusterer") || className.contains("IEvaluator") || className.contains("IGenerator") || className.contains("IModeller") || className.contains("ITransducer")))
+ classes.add(Class.forName(className));
+ } catch (ClassNotFoundException ignore) {
+ // unresolvable class files are deliberately skipped
+ }
+ }
+ return classes;
+ }
+
+ // Fetches a URL with Apache HttpClient (7-minute connect/read timeouts) and
+ // returns the full response body as a String. The reader is closed in the
+ // finally block; the connection manager is shut down after a successful read.
+ // NOTE(review): DefaultHttpClient/HttpConnectionParams are the deprecated
+ // HttpClient 4.2-era API — kept as-is to match the rest of the file.
+ public static String readPage(URL url) throws Exception {
+
+ DefaultHttpClient httpClient = new DefaultHttpClient();
+ HttpParams params = httpClient.getParams();
+ HttpConnectionParams.setConnectionTimeout(params, 7 * 60000);
+ HttpConnectionParams.setSoTimeout(params, 7 * 60000);
+ HttpConnectionParams.setStaleCheckingEnabled(params, false);
+ HttpConnectionParams.setSoKeepalive(params, false);
+
+ HttpGet request = new HttpGet(url.toURI());
+ HttpResponse response = httpClient.execute(request);
+ System.out.println("URL executed!");
+ Reader reader = null;
+ try {
+ reader = new InputStreamReader(response.getEntity().getContent());
+ System.out.println("Read input stream!");
+ StringBuffer sb = new StringBuffer();
+ {
+ // drain the stream in 1 KiB chunks
+ int read;
+ char[] cbuf = new char[1024];
+ while ((read = reader.read(cbuf)) != -1)
+ sb.append(cbuf, 0, read);
+ }
+
+ EntityUtils.consume(response.getEntity());
+ httpClient.getConnectionManager().shutdown();
+
+ return sb.toString();
+
+ } finally {
+
+ if (reader != null) {
+ try {
+ reader.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
+ // Determines the response charset from the connection's Content-Type header,
+ // falling back to the platform default charset whenever the header is
+ // missing, unreadable, or names an unsupported charset.
+ public static Charset getConnectionCharset(URLConnection connection) {
+ String contentType = null;
+ try {
+ contentType = connection.getContentType();
+ } catch (Exception e) {
+ // specified charset is not found,
+ // skip it to return the default one
+ return Charset.defaultCharset();
+ }
+ if (contentType != null && contentType.length() > 0) {
+ contentType = contentType.toLowerCase();
+ String charsetName = extractCharsetName(contentType);
+ if (charsetName != null && charsetName.length() > 0) {
+ try {
+ return Charset.forName(charsetName);
+ } catch (Exception e) {
+ // specified charset is not found,
+ // skip it to return the default one
+ }
+ }
+ }
+
+ // return the default charset
+ return Charset.defaultCharset();
+ }
+
+ /**
+ * Extract the charset name form the content type string. Content type string is received from Content-Type header.
+ *
+ * @param contentType
+ * the content type string, must be not null.
+ * @return the found charset name or null if not found.
+ */
+ private static String extractCharsetName(String contentType) {
+ // split onto media types
+ final String[] mediaTypes = contentType.split(":");
+ if (mediaTypes.length > 0) {
+ // use only the first one, and split it on parameters
+ final String[] params = mediaTypes[0].split(";");
+
+ // find the charset parameter and return it's value
+ for (String each : params) {
+ each = each.trim();
+ if (each.startsWith("charset=")) {
+ // return the charset name
+ return each.substring(8).trim();
+ }
+ }
+ }
+
+ return null;
+ }
+
+ private static String RUNTIME_RESOURCE_NAME = "ReportsStoreGateway";
+ private static String CATEGORY_NAME = "Service";
+
+ // Fetches a URL with plain URLConnection (no Apache HttpClient), using a
+ // browser-like User-Agent and 25-minute timeouts, and returns the body as a
+ // String with CRLF line terminators. The body is also echoed to stdout.
+ public static String readPageNoHttpClient(URL url) throws Exception {
+ URLConnection conn = url.openConnection();
+ // pretend you're a browser (make my request from Java more browsery-like.)
+ conn.addRequestProperty("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.95 Safari/537.11");
+ conn.setDoOutput(true);
+ conn.setAllowUserInteraction(true);
+ conn.setConnectTimeout(25 * 60000);
+ conn.setReadTimeout(25 * 60000);
+
+ Charset charset = getConnectionCharset(conn);
+
+ BufferedReader dis = new BufferedReader(new InputStreamReader(conn.getInputStream(), charset));
+ String inputLine;
+ StringBuffer pageBuffer = new StringBuffer();
+
+ // Loop through each line, looking for the closing head element
+ while ((inputLine = dis.readLine()) != null) {
+ pageBuffer.append(inputLine + "\r\n");
+ }
+
+ String page = pageBuffer.toString();
+ System.out.println(page);
+ conn.getInputStream().close();
+ return page;
+ }
+
+ // Same as readPageNoHttpClient, but additionally sends the gCube
+ // authorization token in the "gcube-token" request header.
+ public static String readPageHTTPHeader(URL url, String token) throws Exception {
+ URLConnection conn = url.openConnection();
+ // pretend you're a browser (make my request from Java more browsery-like.)
+ conn.addRequestProperty("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.95 Safari/537.11");
+ conn.setDoOutput(true);
+ conn.setAllowUserInteraction(true);
+ conn.setConnectTimeout(25 * 60000);
+ conn.setReadTimeout(25 * 60000);
+ conn.setRequestProperty("gcube-token", token);
+
+ Charset charset = getConnectionCharset(conn);
+
+ BufferedReader dis = new BufferedReader(new InputStreamReader(conn.getInputStream(), charset));
+ String inputLine;
+ StringBuffer pageBuffer = new StringBuffer();
+
+ // Loop through each line, looking for the closing head element
+ while ((inputLine = dis.readLine()) != null) {
+ pageBuffer.append(inputLine + "\r\n");
+ }
+
+ String page = pageBuffer.toString();
+ System.out.println(page);
+ conn.getInputStream().close();
+ return page;
+ }
+
+ // build config.xml
+ // Prints every algorithm class found under the mappedclasses package and
+ // the total count; used manually to regenerate the WPS config listing.
+ public static void main(String[] args) throws Exception {
+ String packageS = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses";
+ List> classes = GetCapabilitiesChecker.find(packageS);
+
+ System.out.println(classes + "\n");
+
+ for (Class> classfind : classes) {
+ System.out.println("" + classfind.getName() + "");
+ }
+ // System.exit(0);
+ System.out.println("\n");
+ System.out.println(classes.size() + " algorithms");
+
+ }
+
+ // Alternative manual entry point: lists the algorithm classes, prints a
+ // DescribeProcess URL for each, then fetches each description from the
+ // hard-coded statistical-manager host and reports pages containing WPS
+ // exception markers.
+ public static void main1(String[] args) throws Exception {
+ String packageS = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses";
+ List> classes = GetCapabilitiesChecker.find(packageS);
+
+ System.out.println(classes + "\n");
+
+ for (Class> classfind : classes) {
+ System.out.println("" + classfind.getName() + "");
+ }
+ // System.exit(0);
+ System.out.println("\n");
+
+ for (Class> classfind : classes) {
+ System.out.println("http://localhost:8080/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&Identifier=" + classfind.getName() + "\n");
+ }
+
+ System.out.println("\n");
+ System.out.println("Checking errors in Processes descriptions");
+
+ int counter = 0;
+ for (Class> classfind : classes) {
+ String httplink = "http://statistical-manager-new.d4science.org:8080/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&Identifier=" + classfind.getName();
+ // skip the I* marker interfaces; they are not executable algorithms
+ if (!httplink.contains("IClusterer") && !httplink.contains("IEvaluator") && !httplink.contains("IGenerator") && !httplink.contains("IModeller") && !httplink.contains("ITransducer")) {
+ String pageCheck = readPage(new URL(httplink));
+ counter++;
+ if (pageCheck.contains("ows:ExceptionText") || pageCheck.contains("Exception")) {
+ System.out.println("Reading Link: " + httplink);
+ System.out.println("ERROR:\n" + pageCheck);
+
+ }
+ }
+ }
+
+ System.out.println("Checked " + counter + " algorithms");
+
+ }
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/wps/remote/RegressionTests.java b/src/main/java/org/gcube/dataanalysis/wps/remote/RegressionTests.java
new file mode 100644
index 0000000..0492fc4
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/wps/remote/RegressionTests.java
@@ -0,0 +1,184 @@
+package org.gcube.dataanalysis.wps.remote;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+import org.junit.Test;
+import org.slf4j.LoggerFactory;
+
+public class RegressionTests {
+
+ // Returns false (and dumps the page) when the WPS response body contains an
+ // exception marker; true otherwise. Note: matching on the bare word
+ // "Exception" can also flag legitimate pages that merely mention it.
+ public static boolean checkHttpPage(String httplink, String page) {
+ if (page.contains("ows:ExceptionText") || page.contains("Exception")) {
+ System.out.println("Reading Link: " + httplink);
+ System.out.println("ERROR:\n" + page);
+ return false;
+ }
+ return true;
+ }
+
+ // Difference date2 - date1 converted to the requested TimeUnit.
+ public static long getDateDiff(Date date1, Date date2, TimeUnit timeUnit) {
+ long diffInMillies = date2.getTime() - date1.getTime();
+ return timeUnit.convert(diffInMillies, TimeUnit.MILLISECONDS);
+ }
+
+ // URL-encodes everything after "DataInputs=" in a WPS execution URL,
+ // leaving the base URL untouched, and returns the recomposed URL.
+ public static String prepareURL(String executionURL) throws Exception {
+
+ String firstPart = executionURL.substring(0, executionURL.indexOf("DataInputs=") + 11);
+ System.out.println("Execution URL:" + firstPart);
+ String secondPart = URLEncoder.encode(executionURL.substring(executionURL.indexOf("DataInputs=") + 11), "UTF-8");
+ System.out.println("Parameters: " + secondPart);
+ executionURL = firstPart + secondPart;
+ return executionURL;
+ }
+
+ // Regression driver: reads a test-definition file where each algorithm is
+ // described by three lines (a "name - " header line, a DescribeProcess URL,
+ // and an Execute URL), checks the definition page, runs the execution, and
+ // aborts the whole run at the first failing check. Per-algorithm timings
+ // are accumulated and printed at the end.
+ public static void callHttps(String httpURLFile) throws Exception {
+ //ch.qos.logback.classic.Logger root = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME);
+ //root.setLevel(ch.qos.logback.classic.Level.OFF);
+
+ BufferedReader br = new BufferedReader(new FileReader(new File(httpURLFile)));
+ String line = br.readLine();
+ int counter = 0;
+ Date d0 = new Date(System.currentTimeMillis());
+ LinkedHashMap executionTimes = new LinkedHashMap();
+ while (line != null) {
+ boolean check = true;
+ // lines starting with '#' are comments; algorithm entries contain " - "
+ if (line.contains(" - ") && !line.startsWith("#")) {
+ Date d00 = new Date(System.currentTimeMillis());
+ String algorithmName = line.substring(line.indexOf("-") + 1).trim();
+ String detailsURL = br.readLine();
+ if (!detailsURL.startsWith("http://"))
+ detailsURL = "http://" + detailsURL;
+ System.out.println("************************************************************");
+ System.out.println("TESTING ALGORITHM : " + algorithmName);
+ System.out.println("************************************************************");
+
+ String executionURL = br.readLine();
+ String firstPart = executionURL.substring(0, executionURL.indexOf("DataInputs=") + 11);
+ System.out.println("Execution URL:" + firstPart);
+ String secondPart = URLEncoder.encode(executionURL.substring(executionURL.indexOf("DataInputs=") + 11), "UTF-8");
+ System.out.println("Parameters: " + secondPart);
+ executionURL = firstPart + secondPart;
+
+ System.out.println("CHECKING DEFINITION " + algorithmName + " : " + detailsURL);
+ String pageCheck = GetCapabilitiesChecker.readPageNoHttpClient(new URL(detailsURL));
+ check = checkHttpPage(detailsURL, pageCheck);
+ System.out.println("DEFINITION CHECK " + check);
+ if (!check)
+ break;
+ System.out.println("EXECUTING " + algorithmName + " : " + executionURL);
+ // randomize output file names so reruns do not collide on the server
+ executionURL = executionURL.replace(".nc", "_test" + UUID.randomUUID() + ".nc");
+ pageCheck = GetCapabilitiesChecker.readPageNoHttpClient(new URL(executionURL));
+ System.out.println("EXECUTION RESULT " + pageCheck);
+ check = checkHttpPage(executionURL, pageCheck);
+ System.out.println("EXECUTION CHECK " + check);
+ if (!check)
+ break;
+ Date d11 = new Date(System.currentTimeMillis());
+ System.out.println("EXECUTION TIME " + algorithmName + " : " + getDateDiff(d00, d11, TimeUnit.MILLISECONDS) + " s");
+ executionTimes.put(algorithmName, "" + getDateDiff(d00, d11, TimeUnit.MILLISECONDS));
+ System.out.println("-------------------------------------------------------------\n");
+ counter++;
+ }
+ if (!check) {
+ System.out.println("EXECUTION FAILURE! - BREAK -");
+ break;
+ }
+ line = br.readLine();
+
+ }
+
+ Date d1 = new Date(System.currentTimeMillis());
+ System.out.println("CHECKED " + counter + " PAGES in " + getDateDiff(d0, d1, TimeUnit.MINUTES) + " minutes " + " (" + getDateDiff(d0, d1, TimeUnit.SECONDS) + " s)");
+ System.out.println("EXECUTION TIMES SUMMARY:");
+ for (String key : executionTimes.keySet()) {
+ String time = executionTimes.get(key);
+ System.out.println(key + "," + time + " s");
+ }
+
+ br.close();
+ }
+
+ // One JUnit entry point per target environment; each simply runs the
+ // regression driver against its environment-specific test-definition file.
+ @Test
+ public void testDevNext() throws Exception {
+ String algorithmsfile = "tests/Test-dataminer-devNext.txt";
+ callHttps(algorithmsfile);
+ }
+
+ @Test
+ public void testPreprod() throws Exception {
+ String algorithmsfile = "tests/Test-dataminer-pre.txt";
+ callHttps(algorithmsfile);
+ }
+
+ @Test
+ public void testDevVRE() throws Exception {
+ String algorithmsfile = "tests/Test-dataminer-dev1.txt";
+ callHttps(algorithmsfile);
+ }
+
+ @Test
+ public void testProd1() throws Exception {
+ String algorithmsfile = "tests/Test-dataminer-prod1.txt";
+ callHttps(algorithmsfile);
+ }
+
+ @Test
+ public void testProd2() throws Exception {
+ String algorithmsfile = "tests/Test-dataminer-prod2.txt";
+ callHttps(algorithmsfile);
+ }
+
+ @Test
+ public void testProd3() throws Exception {
+ String algorithmsfile = "tests/Test-dataminer-prod3.txt";
+ callHttps(algorithmsfile);
+ }
+
+
+ @Test
+ public void testProd4() throws Exception {
+ String algorithmsfile = "tests/Test-dataminer-prod4.txt";
+ callHttps(algorithmsfile);
+ }
+
+ @Test
+ public void testProd5() throws Exception {
+ String algorithmsfile = "tests/Test-dataminer-prod5.txt";
+ callHttps(algorithmsfile);
+ }
+
+ @Test
+ public void testProd6() throws Exception {
+ String algorithmsfile = "tests/Test-dataminer-prod6.txt";
+ callHttps(algorithmsfile);
+ }
+
+ @Test
+ public void testProdGeneralProxy() throws Exception {
+ String algorithmsfile = "tests/Test-dataminer-proxy-general.txt";
+ callHttps(algorithmsfile);
+ }
+
+ @Test
+ public void testProdBigDataProxy() throws Exception {
+ String algorithmsfile = "tests/Test-dataminer-proxy-bigdata.txt";
+ callHttps(algorithmsfile);
+ }
+
+ @Test
+ public void testEGI() throws Exception {
+ String algorithmsfile = "tests/Test-dataminer-EGI.txt";
+ callHttps(algorithmsfile);
+ }
+
+
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/wps/remote/RemoteAligner.java b/src/main/java/org/gcube/dataanalysis/wps/remote/RemoteAligner.java
new file mode 100644
index 0000000..ec04b44
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/wps/remote/RemoteAligner.java
@@ -0,0 +1,53 @@
+package org.gcube.dataanalysis.wps.remote;
+
+
+// Placeholder subclass of RemoteInstaller. Its former align() routine (SSH
+// into a dataminer host, refresh algorithm jars and configuration from SVN,
+// then restart the container) is retained below only as commented-out code;
+// the class currently adds no behavior of its own.
+public class RemoteAligner extends RemoteInstaller{
+
+
+
+/*
+ public static void align(String dataminer,String password) throws Exception{
+ long t0 = System.currentTimeMillis();
+ String libdir = "cd ./tomcat/webapps/wps/WEB-INF/lib/";
+ String getAlgorithms = "wget -r -l1 -e robots=off --no-parent http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/";
+ String moveAlgorithms = "mv svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/* ~/tomcat/webapps/wps/WEB-INF/lib/";
+ String rmAlgorithms = "rm -r svn.research-infrastructures.eu/";
+
+ String libcfg = "cd ../../ecocfg/";
+ String getconfig = "wget -r -l1 -e robots=off --no-parent http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/cfg/";
+ String moveConfig = "mv svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/cfg/* ~/tomcat/webapps/wps/ecocfg/";
+ String rmConfig = "rm -r svn.research-infrastructures.eu/";
+
+ String configDir = "cd ../config/";
+
+ String changewpsconfig = "sed -Ei 's/localhost/"+dataminer+"/g' wps_config.xml";
+
+ String commands [] = {
+ sshConnection+dataminer,"2",
+ "y","0",
+ password,"0",
+ "ls -l","0",
+ "./stopContainer.sh","3",
+ libdir,"0",
+ getAlgorithms,"30",
+ moveAlgorithms,"1",
+ rmAlgorithms,"1",
+ libcfg,"0",
+ getconfig,"5",
+ moveConfig,"1",
+ rmConfig,"1",
+ configDir,"0",
+ changewpsconfig,"1",
+ "cd /home/gcube/","0",
+ "./startContainer.sh","60"
+ };
+
+
+ cmd2(commands);
+
+ System.out.println("Elapsed Time: "+(System.currentTimeMillis()-t0));
+ }
+
+ */
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/wps/remote/RemoteInstaller.java b/src/main/java/org/gcube/dataanalysis/wps/remote/RemoteInstaller.java
new file mode 100644
index 0000000..dcfb6d6
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/wps/remote/RemoteInstaller.java
@@ -0,0 +1,343 @@
+package org.gcube.dataanalysis.wps.remote;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
+import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
+
+public class RemoteInstaller {
+
+ static String sshConnection = "plink -ssh -i privatekeyss2.ppk gcube@";
+
+ // Drains whatever is currently available on a child process's stdout and
+ // stderr streams, echoing each character to this JVM's stdout and returning
+ // the combined text. Non-blocking: only bytes already available are read.
+ // NOTE(review): the 'out' parameter is unused here — presumably kept for
+ // signature symmetry with the callers; confirm before removing.
+ static String print(InputStream std,OutputStream out,InputStream err) throws Exception{
+ int value = 0;
+ StringBuffer sb = new StringBuffer();
+ if (std.available () > 0) {
+ System.out.println ("STD:");
+ value = std.read ();
+ System.out.print ((char) value);
+ sb.append(""+(char) value);
+ while (std.available () > 0) {
+ value = std.read ();
+ System.out.print ((char) value);
+ sb.append(""+(char) value);
+ }
+ }
+
+ if (err.available () > 0) {
+ System.out.println ("ERR:");
+ value = err.read ();
+ System.out.print ((char) value);
+ sb.append(""+(char) value);
+ while (err.available () > 0) {
+ value = err.read ();
+ System.out.print ((char) value);
+ sb.append(""+(char) value);
+ }
+ }
+
+ System.out.println();
+
+ return sb.toString();
+ }
+
+ // Runs commands[0] as an external process (the plink SSH session) and then
+ // feeds the remaining entries to it, which alternate between a command
+ // string and a wait hint. An "./addAlgorithm" command whose output contains
+ // "Exception:" is retried once; a second failure aborts the JVM.
+ // NOTE(review): the loop body appears truncated/mangled in this diff
+ // rendering ("for (int i=2;i0){" is not valid Java) — the original source
+ // must be consulted before changing anything here.
+ static void cmd1(String[] commands) throws Exception {
+ Runtime r = Runtime.getRuntime ();
+ Process p = r.exec (commands[0]);
+ InputStream std = p.getInputStream ();
+ OutputStream out = p.getOutputStream ();
+ InputStream err = p.getErrorStream ();
+ Thread.sleep (1000);
+ print(std,out,err);
+ int commandTries = 1;
+ for (int i=2;i0){
+ k=1;
+ lastline.append(value);
+ }
+
+ }
+
+ if (command.contains("./addAlgorithm")){
+ //if (!lastline.toString().contains("All done!")){
+ if (lastline.toString().contains("Exception:")){
+ if (commandTries<2){
+ commandTries++;
+ i = i-2; //retry the command
+ }
+ else{
+ System.err.println("Error at installing the algorithm!!!");
+ System.err.println("last line "+lastline);
+ System.exit(-1);
+ }
+ }
+ }
+
+ }
+
+ p.destroy ();
+
+ System.out.println("Ready!");
+ }
+
+ // Variant of cmd1 that additionally watches the child's output for the
+ // remote shell prompt ("gcube@dataminer") and stops draining once the
+ // prompt is seen. Same retry-once policy for "./addAlgorithm" commands.
+ // NOTE(review): like cmd1, the loop header is truncated in this diff
+ // rendering ("for (int i=2;i0){") — consult the original source.
+ static void cmd2(String[] commands) throws Exception {
+ Runtime r = Runtime.getRuntime ();
+ Process p = r.exec (commands[0]);
+ InputStream std = p.getInputStream ();
+ OutputStream out = p.getOutputStream ();
+ InputStream err = p.getErrorStream ();
+ Thread.sleep (1000);
+ print(std,out,err);
+ int commandTries = 1;
+ for (int i=2;i0){
+ k=1;
+ lastline.append(value);
+ System.out.println("lastline: "+value.substring(0, Math.min(200,value.length())));
+ if (value.startsWith("gcube@dataminer")){
+ System.out.println("Prompt READY!");
+ break;
+ }
+ }
+
+ }
+
+ if (command.contains("./addAlgorithm")){
+ //if (!lastline.toString().contains("All done!")){
+ if (lastline.toString().contains("Exception:")){
+ if (commandTries<2){
+ commandTries++;
+ i = i-2; //retry the command
+ }
+ else{
+ System.err.println("Error at installing the algorithm!!!");
+ System.err.println("last line "+lastline);
+ System.exit(-1);
+ }
+ }
+ }
+
+ }
+
+ p.destroy ();
+
+ System.out.println("Ready!");
+ }
+
+ // Convenience overload: runs the installer robot against the default
+ // "DataMinerAlgorithms.txt" installation-commands file.
+ public static void startRobot(String dataminer,String password,String scope, boolean skipinstallerdownload) throws Exception{
+ String filepath = "DataMinerAlgorithms.txt";
+ startRobot(dataminer, password, scope, filepath, skipinstallerdownload);
+ }
+
+ // Orchestrates a full remote algorithm installation over SSH: builds the
+ // command script (cleanup of old jars, optional download/unzip of the
+ // installer, container stop), appends the per-algorithm install commands
+ // from filepath in batches of ~10, appends the container restart, and hands
+ // the whole sequence to cmd1. Timing is printed at the end.
+ public static void startRobot(String dataminer,String password,String scope, String filepath, boolean skipinstallerdownload) throws Exception{
+ long t0 = System.currentTimeMillis();
+ String installStrings = FileTools.loadString(filepath,"UTF-8");
+ //String[] install = installStrings.split("\n");
+ String[] install = installStrings.split("\n");
+ System.out.println("Algorithms to install "+install.length);
+ ArrayList installArray = new ArrayList(Arrays.asList(install));
+
+ // stale libraries that must be removed before reinstalling
+ String rmlogging = "rm ./tomcat/webapps/wps/WEB-INF/lib/log4j-over-slf4j-1.7.5.jar";
+ String rmlib1 = "rm ./tomcat/webapps/wps/WEB-INF/lib/STEP1VPAICCATBFTERetros-1.0.0.jar";
+// String rmlib1 = "rm ./tomcat/webapps/wps/WEB-INF/lib/ECOPATH*";
+ String rmlib2 = "rm ./tomcat/webapps/wps/WEB-INF/lib/TunaAtlasDataAccess-1.0.0.jar";
+ String rmlib3 = "rm ./tomcat/webapps/wps/WEB-INF/lib/dataminer-algorithms.jar";
+
+
+ String rmInstaller = "rm algorithmInstaller.zip";
+ String rmInstallerFolder = "rm -r ./algorithmInstaller";
+
+ String chmod = "chmod 777 tomcat/webapps/wps/config/*";
+ String rmSMState = "rm -r SmartGears/state/";
+
+ //String commands [] = {sshConnection+dataminer,"0",password,"0","ls -l","0",rmlogging,"0",chmod,"0","cd algorithmInstaller","0",install,"5",install,"5","cd ..","0",rmSMState,"0","./stopContainer.sh","3","./startContainer.sh","30"};
+ String forecommands [] = null;
+ if (!skipinstallerdownload){
+ // fresh download of the installer package from SVN
+ String getInstaller = "wget --no-check-certificate https://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataminerAlgorithmsInstaller/package/algorithmInstaller.zip";
+ String unzipInstaller = "unzip algorithmInstaller.zip";
+ String choice= "N";
+ String mod= "chmod 777 -R algorithmInstaller/*";
+
+ String iforecommands [] = {sshConnection+dataminer,"2","y","0",password,"0","ls -l","0",rmlogging,"0",rmlib1,"0",rmlib2,"0",rmlib3,"0",rmInstaller,"0",rmInstallerFolder,"0",
+ chmod,"0",getInstaller,"2",unzipInstaller,"2",choice,"0",mod,"0","./stopContainer.sh","3",
+ "cd algorithmInstaller","0"};
+ forecommands = iforecommands;
+ }
+ else{
+ // reuse the installer already present on the host
+ String iforecommands [] = {sshConnection+dataminer,"2","y","0",password,"0","ls -l","0",rmlogging,"0",rmlib1,"0",rmlib2,"0",rmlib3,"0",rmInstaller,"0",
+ chmod,"0","./stopContainer.sh","10","cd algorithmInstaller","0"};
+ forecommands = iforecommands;
+ }
+
+
+ String postcommands [] = {"cd ..","0","./startContainer.sh","30"};
+
+
+ ArrayList commandsArray = new ArrayList(Arrays.asList(forecommands));
+ String [] installers = installStrings.split("\n");
+ StringBuffer sb = new StringBuffer();
+ // batch the install lines: every (max+1) lines become one command + wait "10"
+ int max = 10;
+ int i =0;
+ for (String installer: installers){
+ int limit = 1000;
+ if (installer.length()>limit)
+ installer = installer.substring(0, limit)+"...\"";
+ sb.append(installer+"\n");
+ if (i == max)
+ {
+ i = 0;
+ String commands = sb.toString();
+ commandsArray.add(commands);
+ commandsArray.add("10");
+ sb = new StringBuffer();
+ }
+ else
+ i++;
+ }
+
+ // flush the final partial batch, if any
+ if (sb.toString().length()>0)
+ {
+ commandsArray.add(sb.toString());
+ commandsArray.add("10");
+ }
+ /*
+ for (String installer:installArray) {
+ installer = installer.trim().replace("/gcube/devsec", scope);
+ if (installer.length()>0){
+ commandsArray.add(installer);
+ commandsArray.add("3");
+ }
+ }
+ */
+
+ commandsArray.addAll(new ArrayList(Arrays.asList(postcommands)));
+
+ String[] commands = new String[commandsArray.size()];
+ commands = commandsArray.toArray(commands);
+
+ cmd1(commands);
+ System.out.println("Elapsed Time: "+(System.currentTimeMillis()-t0));
+ }
+
+ // Target deployment environments, each mapped by dumpInstallerFile to its
+ // own SVN algorithm-list URL and local output file.
+ public enum Environment {
+ PROD,
+ DEV,
+ PROTO,
+ PRE
+ }
+
+ // Downloads the environment's algorithm table from SVN, splits it into
+ // pipe-delimited 7-field rows, extracts the installer command from the 6th
+ // field of each non-deprecated row, and writes the commands to a local file
+ // (e.g. "ProdInstaller.txt"). Returns the file name, or null on write error.
+ // NOTE(review): the row-content markup around tablerow[5] appears stripped
+ // by the diff rendering (empty "" literals) — confirm against the original.
+ public static String dumpInstallerFile(Environment env){
+
+ String url = "";
+ String file = "";
+
+ switch(env){
+ case PROD:
+ url = "http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/prod/algorithms";
+ file ="ProdInstaller.txt";
+ break;
+ case DEV:
+ url = "http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/dev/algorithms";
+ file ="DevInstaller.txt";
+ break;
+
+ case PROTO:
+ url = "http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/proto/algorithms";
+ file ="ProtoInstaller.txt";
+ break;
+
+ case PRE:
+ url = "http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-analysis/DataMinerConfiguration/algorithms/preprod/algorithms";
+ file ="PreInstaller.txt";
+ break;
+
+ }
+
+ String answer = HttpRequest.sendGetRequest(url, "");
+
+ List installationStringsList = new ArrayList();
+ String answerbuffer = answer;
+ String install = "";
+ // consume the answer 7 pipe-delimited tokens at a time (one table row)
+ while (answerbuffer.length()>0){
+
+ for (int i=0;i<7;i++){
+
+ int pipe = answerbuffer.indexOf("|");
+ String token = answerbuffer.substring(0,pipe);
+ install+=token+"|";
+ answerbuffer = answerbuffer.substring(pipe+1);
+ }
+ install = install.trim();
+ if (!install.startsWith("|"))
+ install = "|"+install;
+ installationStringsList.add(install);
+ install = "";
+ }
+
+ StringBuffer sb = new StringBuffer();
+ for (String installer:installationStringsList){
+ if (installer.contains("deprecated"))
+ continue;
+ String [] tablerow = installer.split("\\|");
+ String row = tablerow[5];
+ if (row.contains("")){
+ row = row .replace("","").replace("", "");
+ row = row.trim();
+ sb.append(row+"\n");
+ }
+ }
+
+ try {
+ // overwrite (append=false) any previous dump for this environment
+ FileWriter fw = new FileWriter(new File(file),false) ;
+ fw.write(sb.toString());
+ fw.close();
+ return file;
+ }catch(Exception e){
+ e.printStackTrace();
+ return null;
+ }
+
+
+ }
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/clusterers/DBSCAN.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/clusterers/DBSCAN.class
new file mode 100644
index 0000000..d655b28
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/clusterers/DBSCAN.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/clusterers/KMEANS.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/clusterers/KMEANS.class
new file mode 100644
index 0000000..ab1ab17
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/clusterers/KMEANS.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/clusterers/LOF.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/clusterers/LOF.class
new file mode 100644
index 0000000..d95fd2b
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/clusterers/LOF.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/clusterers/XMEANS.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/clusterers/XMEANS.class
new file mode 100644
index 0000000..6d93edd
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/clusterers/XMEANS.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/evaluators/DISCREPANCY_ANALYSIS.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/evaluators/DISCREPANCY_ANALYSIS.class
new file mode 100644
index 0000000..e75209c
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/evaluators/DISCREPANCY_ANALYSIS.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/evaluators/HRS.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/evaluators/HRS.class
new file mode 100644
index 0000000..eb2b834
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/evaluators/HRS.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/evaluators/MAPS_COMPARISON.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/evaluators/MAPS_COMPARISON.class
new file mode 100644
index 0000000..f3ee38f
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/evaluators/MAPS_COMPARISON.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/evaluators/QUALITY_ANALYSIS.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/evaluators/QUALITY_ANALYSIS.class
new file mode 100644
index 0000000..68b25d9
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/evaluators/QUALITY_ANALYSIS.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/generators/BIONYM.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/generators/BIONYM.class
new file mode 100644
index 0000000..6d573ff
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/generators/BIONYM.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/generators/CMSY.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/generators/CMSY.class
new file mode 100644
index 0000000..861f86c
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/generators/CMSY.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/generators/ICCAT_VPA.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/generators/ICCAT_VPA.class
new file mode 100644
index 0000000..0136b93
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/generators/ICCAT_VPA.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/ABSENCE_CELLS_FROM_AQUAMAPS.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/ABSENCE_CELLS_FROM_AQUAMAPS.class
new file mode 100644
index 0000000..2cb8161
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/ABSENCE_CELLS_FROM_AQUAMAPS.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/BIONYM_LOCAL.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/BIONYM_LOCAL.class
new file mode 100644
index 0000000..ba9e29b
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/BIONYM_LOCAL.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/CSQUARE_COLUMN_CREATOR.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/CSQUARE_COLUMN_CREATOR.class
new file mode 100644
index 0000000..ce9be8f
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/CSQUARE_COLUMN_CREATOR.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/ESRI_GRID_EXTRACTION.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/ESRI_GRID_EXTRACTION.class
new file mode 100644
index 0000000..c1a4d28
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/ESRI_GRID_EXTRACTION.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/FAO_OCEAN_AREA_COLUMN_CREATOR.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/FAO_OCEAN_AREA_COLUMN_CREATOR.class
new file mode 100644
index 0000000..398dcf7
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/FAO_OCEAN_AREA_COLUMN_CREATOR.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT.class
new file mode 100644
index 0000000..c4bb12e
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/GENERIC_CHARTS.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/GENERIC_CHARTS.class
new file mode 100644
index 0000000..3f2b2dc
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/GENERIC_CHARTS.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/GEO_CHART.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/GEO_CHART.class
new file mode 100644
index 0000000..524ec74
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/GEO_CHART.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/HCAF_FILTER.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/HCAF_FILTER.class
new file mode 100644
index 0000000..3bb0429
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/HCAF_FILTER.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/MAX_ENT_NICHE_MODELLING.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/MAX_ENT_NICHE_MODELLING.class
new file mode 100644
index 0000000..e48d00d
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/MAX_ENT_NICHE_MODELLING.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_MOST_OBSERVED_SPECIES.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_MOST_OBSERVED_SPECIES.class
new file mode 100644
index 0000000..a3416bd
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_MOST_OBSERVED_SPECIES.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_MOST_OBSERVED_TAXA.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_MOST_OBSERVED_TAXA.class
new file mode 100644
index 0000000..35001d1
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_MOST_OBSERVED_TAXA.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA.class
new file mode 100644
index 0000000..eeecdd7
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA.class
new file mode 100644
index 0000000..06b31f1
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA.class
new file mode 100644
index 0000000..adf651b
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_SPECIES_OBSERVATIONS_PER_YEAR.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_SPECIES_OBSERVATIONS_PER_YEAR.class
new file mode 100644
index 0000000..1336693
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_SPECIES_OBSERVATIONS_PER_YEAR.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_TAXA_OBSERVATIONS_PER_YEAR.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_TAXA_OBSERVATIONS_PER_YEAR.class
new file mode 100644
index 0000000..8e26337
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OBIS_TAXA_OBSERVATIONS_PER_YEAR.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OCCURRENCE_ENRICHMENT.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OCCURRENCE_ENRICHMENT.class
new file mode 100644
index 0000000..2df869a
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/OCCURRENCE_ENRICHMENT.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/PRESENCE_CELLS_GENERATION.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/PRESENCE_CELLS_GENERATION.class
new file mode 100644
index 0000000..0b6ee23
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/PRESENCE_CELLS_GENERATION.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/RASTER_DATA_PUBLISHER.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/RASTER_DATA_PUBLISHER.class
new file mode 100644
index 0000000..23c42de
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/RASTER_DATA_PUBLISHER.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/SGVM_INTERPOLATION.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/SGVM_INTERPOLATION.class
new file mode 100644
index 0000000..88aeb54
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/SGVM_INTERPOLATION.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/SUBMITQUERY.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/SUBMITQUERY.class
new file mode 100644
index 0000000..72dfd8a
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/SUBMITQUERY.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIMEEXTRACTION.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIMEEXTRACTION.class
new file mode 100644
index 0000000..d8dc195
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIMEEXTRACTION.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIMEEXTRACTION_TABLE.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIMEEXTRACTION_TABLE.class
new file mode 100644
index 0000000..d018da9
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIMEEXTRACTION_TABLE.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIME_GEO_CHART.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIME_GEO_CHART.class
new file mode 100644
index 0000000..1316249
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIME_GEO_CHART.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIME_SERIES_ANALYSIS.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIME_SERIES_ANALYSIS.class
new file mode 100644
index 0000000..75b2a24
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIME_SERIES_ANALYSIS.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIME_SERIES_CHARTS.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIME_SERIES_CHARTS.class
new file mode 100644
index 0000000..17f2753
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/TIME_SERIES_CHARTS.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/XYEXTRACTOR.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/XYEXTRACTOR.class
new file mode 100644
index 0000000..af6ca5d
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/XYEXTRACTOR.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/XYEXTRACTOR_TABLE.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/XYEXTRACTOR_TABLE.class
new file mode 100644
index 0000000..25d912a
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/XYEXTRACTOR_TABLE.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/ZEXTRACTION.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/ZEXTRACTION.class
new file mode 100644
index 0000000..8cb73d4
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/ZEXTRACTION.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/ZEXTRACTION_TABLE.class b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/ZEXTRACTION_TABLE.class
new file mode 100644
index 0000000..5d9e03f
Binary files /dev/null and b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/transducerers/ZEXTRACTION_TABLE.class differ
diff --git a/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/util/Foo.java b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/util/Foo.java
new file mode 100644
index 0000000..93612b1
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/util/Foo.java
@@ -0,0 +1,5 @@
+package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.util;
+
+public class Foo { // Empty class with no members or behavior — presumably a placeholder keeping the .util package present; TODO confirm it is referenced, otherwise remove
+
+}
diff --git a/src/main/resources/templates/classtemplate.properties b/src/main/resources/templates/classtemplate.properties
new file mode 100644
index 0000000..850e0e9
--- /dev/null
+++ b/src/main/resources/templates/classtemplate.properties
@@ -0,0 +1,19 @@
+package=package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.#PACKAGE#;
+import=import java.io.File;\nimport java.net.URL;\nimport org.n52.wps.algorithm.annotation.MethodOrder;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.LinkedHashMap;\nimport java.io.StringWriter;\nimport org.apache.commons.io.IOUtils;\nimport org.apache.xmlbeans.XmlObject;\nimport org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;\nimport org.n52.wps.algorithm.annotation.*;\nimport org.n52.wps.io.data.*;\nimport org.n52.wps.io.data.binding.complex.*;\nimport org.n52.wps.io.data.binding.literal.*;\nimport org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
+description=@Algorithm(statusSupported=true, title="#TITLE#", abstrakt="#ABSTRACT#", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.#PACKAGE#.#CLASSNAME#", version = "1.1.0")
+class_definition=public class #CLASSNAME# extends AbstractEcologicalEngineMapper implements #INTERFACE#{
+class_closure=@Execute public void run() throws Exception { super.run(); } }
+stringInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void set#IDMETHOD#(String data) {inputs.put(\"#ID#\",data);}
+enumeratedInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", allowedValues= {#ALLOWED#}, defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void set#IDMETHOD#(String data) {inputs.put(\"#ID#\",data);}
+doubleInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void set#IDMETHOD#(Double data) {inputs.put(\"#ID#\",""+data);}
+integerInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void set#IDMETHOD#(Integer data) {inputs.put(\"#ID#\",""+data);}
+booleanInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", allowedValues= {"true","false"}, title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void set#IDMETHOD#(Boolean data) {inputs.put(\"#ID#\",""+data);}
+csvFileInput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataInput(abstrakt="#ABSTRACT#", title="#TITLE#", maxOccurs=1, minOccurs=1, identifier = "#ID#", binding = GenericFileDataBinding.class) public void set#IDMETHOD#(GenericFileData file) {inputs.put(\"#ID#\",file);}
+gislinkInput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataInput(abstrakt="#ABSTRACT#", title="#TITLE#", maxOccurs=1, minOccurs=1, identifier = "#ID#", binding = GisLinkDataInputBinding.class) public void set#IDMETHOD#(GenericFileData file) {inputs.put(\"#ID#\",file);}
+d4scienceFileInput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataInput(abstrakt="#ABSTRACT#", title="#TITLE#", maxOccurs=1, minOccurs=1, identifier = "#ID#", binding = D4ScienceDataInputBinding.class) public void set#IDMETHOD#(GenericFileData file) {inputs.put(\"#ID#\",file);}
+d4scienceFileOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = D4ScienceFileDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"application/d4science");} catch (Exception e) {e.printStackTrace();return null;}}
+pngFileOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = PngFileDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"image/png");} catch (Exception e) {e.printStackTrace();return null;}}
+csvFileOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = CsvFileDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
+gisLinkOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = GisLinkDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"application/geotiff");} catch (Exception e) {e.printStackTrace();return null;}}
+stringOutput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = LiteralStringBinding.class) public String get#IDMETHOD#() {return (String) outputs.get("#ID#");}
+optionalOutput=@MethodOrder()\n@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)\n public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
\ No newline at end of file
diff --git a/target/classes/templates/classtemplate.properties b/target/classes/templates/classtemplate.properties
new file mode 100644
index 0000000..850e0e9
--- /dev/null
+++ b/target/classes/templates/classtemplate.properties
@@ -0,0 +1,19 @@
+package=package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.#PACKAGE#;
+import=import java.io.File;\nimport java.net.URL;\nimport org.n52.wps.algorithm.annotation.MethodOrder;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.LinkedHashMap;\nimport java.io.StringWriter;\nimport org.apache.commons.io.IOUtils;\nimport org.apache.xmlbeans.XmlObject;\nimport org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;\nimport org.n52.wps.algorithm.annotation.*;\nimport org.n52.wps.io.data.*;\nimport org.n52.wps.io.data.binding.complex.*;\nimport org.n52.wps.io.data.binding.literal.*;\nimport org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
+description=@Algorithm(statusSupported=true, title="#TITLE#", abstrakt="#ABSTRACT#", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.#PACKAGE#.#CLASSNAME#", version = "1.1.0")
+class_definition=public class #CLASSNAME# extends AbstractEcologicalEngineMapper implements #INTERFACE#{
+class_closure=@Execute public void run() throws Exception { super.run(); } }
+stringInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void set#IDMETHOD#(String data) {inputs.put(\"#ID#\",data);}
+enumeratedInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", allowedValues= {#ALLOWED#}, defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void set#IDMETHOD#(String data) {inputs.put(\"#ID#\",data);}
+doubleInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void set#IDMETHOD#(Double data) {inputs.put(\"#ID#\",""+data);}
+integerInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void set#IDMETHOD#(Integer data) {inputs.put(\"#ID#\",""+data);}
+booleanInput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataInput(abstrakt="#ABSTRACT#", defaultValue="#DEFAULT#", allowedValues= {"true","false"}, title="#TITLE#", identifier = "#ID#", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void set#IDMETHOD#(Boolean data) {inputs.put(\"#ID#\",""+data);}
+csvFileInput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataInput(abstrakt="#ABSTRACT#", title="#TITLE#", maxOccurs=1, minOccurs=1, identifier = "#ID#", binding = GenericFileDataBinding.class) public void set#IDMETHOD#(GenericFileData file) {inputs.put(\"#ID#\",file);}
+gislinkInput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataInput(abstrakt="#ABSTRACT#", title="#TITLE#", maxOccurs=1, minOccurs=1, identifier = "#ID#", binding = GisLinkDataInputBinding.class) public void set#IDMETHOD#(GenericFileData file) {inputs.put(\"#ID#\",file);}
+d4scienceFileInput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataInput(abstrakt="#ABSTRACT#", title="#TITLE#", maxOccurs=1, minOccurs=1, identifier = "#ID#", binding = D4ScienceDataInputBinding.class) public void set#IDMETHOD#(GenericFileData file) {inputs.put(\"#ID#\",file);}
+d4scienceFileOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = D4ScienceFileDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"application/d4science");} catch (Exception e) {e.printStackTrace();return null;}}
+pngFileOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = PngFileDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"image/png");} catch (Exception e) {e.printStackTrace();return null;}}
+csvFileOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = CsvFileDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
+gisLinkOutput=@MethodOrder(value=#ORDER_VALUE#)\n@ComplexDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = GisLinkDataBinding.class) public GenericFileData get#IDMETHOD#() {URL url=null;try {url = new URL((String) outputs.get("#ID#")); return new GenericFileData(url.openStream(),"application/geotiff");} catch (Exception e) {e.printStackTrace();return null;}}
+stringOutput=@MethodOrder(value=#ORDER_VALUE#)\n@LiteralDataOutput(abstrakt="#ABSTRACT#", title="#TITLE#", identifier = "#ID#", binding = LiteralStringBinding.class) public String get#IDMETHOD#() {return (String) outputs.get("#ID#");}
+optionalOutput=@MethodOrder()\n@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)\n public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
\ No newline at end of file