version 1.2
git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/DataMiner@126560 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent
c6aca19620
commit
54335dc3ca
|
@ -1,8 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<classpath>
|
||||
<classpathentry kind="src" output="target/classes" path="src/main/java"/>
|
||||
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources"/>
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
|
||||
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER"/>
|
||||
<classpathentry kind="output" path="target/classes"/>
|
||||
</classpath>
|
23
.project
23
.project
|
@ -1,23 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<projectDescription>
|
||||
<name>DataMiner</name>
|
||||
<comment></comment>
|
||||
<projects>
|
||||
</projects>
|
||||
<buildSpec>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.jdt.core.javabuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.m2e.core.maven2Builder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
</buildSpec>
|
||||
<natures>
|
||||
<nature>org.eclipse.m2e.core.maven2Nature</nature>
|
||||
<nature>org.eclipse.jdt.core.javanature</nature>
|
||||
</natures>
|
||||
</projectDescription>
|
|
@ -1,9 +0,0 @@
|
|||
eclipse.preferences.version=1
|
||||
encoding//src/main/java=UTF-8
|
||||
encoding//src/main/java/org/n52/wps/demo/CopyOfGPDemoFile.java=UTF-8
|
||||
encoding//src/main/java/org/n52/wps/demo/GPDemoFile.java=UTF-8
|
||||
encoding//src/main/java/org/n52/wps/demo/GPDemoSimple.java=UTF-8
|
||||
encoding//src/main/java/org/n52/wps/demo/GPDemoXML.java=UTF-8
|
||||
encoding//src/main/java/org/n52/wps/demo/TestIO.java=UTF-8
|
||||
encoding//src/main/resources=UTF-8
|
||||
encoding/<project>=UTF-8
|
|
@ -1,12 +0,0 @@
|
|||
eclipse.preferences.version=1
|
||||
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
|
||||
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
|
||||
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
|
||||
org.eclipse.jdt.core.compiler.compliance=1.6
|
||||
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
|
||||
org.eclipse.jdt.core.compiler.debug.localVariable=generate
|
||||
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
|
||||
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
|
||||
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
|
||||
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
|
||||
org.eclipse.jdt.core.compiler.source=1.6
|
|
@ -1,4 +0,0 @@
|
|||
activeProfiles=
|
||||
eclipse.preferences.version=1
|
||||
resolveWorkspaceProjects=true
|
||||
version=1
|
|
@ -1,120 +0,0 @@
|
|||
#1 - DBSCAN
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;epsilon=10;min_points=1;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
#2 - KMEANS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;k=3;max_runs=100;min_points=1;max_optimization_steps=10;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
#3 - LOF
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF&DataInputs=FeaturesColumnNames=depthmean|sstmnmax|salinitymean;PointsClusterLabel=OccClustersTest;minimal_points_lower_bound=2;PointsTable=http://goo.gl/VDzpch;minimal_points_upper_bound=10;distance_function=euclidian distance;lof_threshold=2;
|
||||
#4 - XMEANS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;min_points=1;maxIterations=100;minClusters=1;maxClusters=3;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
#5 - BIONYM
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;RawTaxaNamesTable=http://goo.gl/N9e3pC;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;RawNamesColumn=species;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;OutputTableLabel=bionymwps;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;
|
||||
#6 - BIONYM_LOCAL
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;SpeciesAuthorName=Gadus morhua
|
||||
#7 - ABSENCE CELLS FROM AQUAMAPS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Aquamaps_HSPEC=http://goo.gl/24XrmE;Take_Randomly=true;Species_Code=Fis-30189;
|
||||
#8 - HCAF_FILTER
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER&DataInputs=B_Box_Left_Lower_Lat=-17;B_Box_Right_Upper_Long=147;B_Box_Right_Upper_Lat=25;B_Box_Left_Lower_Long=89;Table_Label=wps_hcaf_filter;
|
||||
#9 - MAX_ENT_NICHE_MODELLING
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING&DataInputs=LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;Z=0;Layers=abea05ca-c9dc-43da-89d5-5fd3fa75023d|abea05ca-c9dc-43da-89d5-5fd3fa75023d;TimeIndex=0;MaxIterations=100;SpeciesName=Latimeria chalumnae;DefaultPrevalence=0.5;YResolution=0.5;OccurrencesTable=http://goo.gl/5cnKKp;XResolution=0.5;OutputTableLabel=wps_maxent;
|
||||
#10 - OCCURRENCE_ENRICHMENT
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT&DataInputs=OptionalFilter= ;OutputTableName=wps_enriched;FeaturesNames=temp;OccurrenceTable=http://goo.gl/ZfFcfE;LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;ScientificNameColumn=scientificname;Layers=abea05ca-c9dc-43da-89d5-5fd3fa75023d;TimeColumn=eventdate;Resolution=0.5;
|
||||
#11 - PRESENCE_CELLS_GENERATION
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Species_Code=Fis-30189;
|
||||
#12 - FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR&DataInputs=Resolution=5;Latitude_Column=decimallatitude;InputTable=http://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_fao_area_column;
|
||||
#13 - FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT&DataInputs=Resolution=5;InputTable=http://goo.gl/yJTIBZ;Longitude_Column=centerlong;Quadrant_Column=quadrant;OutputTableName=wps_fao_quadrant;Latitude_Column=centerlat;
|
||||
#14 - CSQUARE_COLUMN_CREATOR
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR&DataInputs=CSquare_Resolution=0.1;Latitude_Column=decimallatitude;InputTable=http://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_csquare_column;
|
||||
#15 - GENERIC_CHARTS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS&DataInputs=Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;TopElementsNumber=10;Attributes=x|y
|
||||
#16 - GEO_CHART
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART&DataInputs=Latitude=y;Quantities=x;Longitude=x;InputTable=http://goo.gl/lWTvcw
|
||||
#17 - TIME_GEO_CHART
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART&DataInputs=Time=time;Latitude=x;Longitude=y;Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;
|
||||
#18 - TIME_SERIES_CHARTS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS&DataInputs=Attributes=x|y|z;Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;Time=time
|
||||
#19 - OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA&DataInputs=Selected species=Gadus morhua;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
#20 - OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
#21 - OBIS_MOST_OBSERVED_SPECIES
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES&DataInputs=Species_number=10;End_year=2015;Start_year=2000;
|
||||
#22 - OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR&DataInputs=Level=GENUS;Selected taxonomy=Gadus|Merluccius;End_year=2015;Start_year=2000;
|
||||
#23 - OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;
|
||||
#24 - OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA&DataInputs=Species=Gadus morhua;Area=LME;End_year=2015;Start_year=2000;
|
||||
#25 - OBIS_MOST_OBSERVED_TAXA
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA&DataInputs=Level=GENUS;Taxa_number=10;End_year=2015;Start_year=2000;
|
||||
#26 - TIME_SERIES_ANALYSIS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS&DataInputs=FFT_Window_Samples=12;SSA_Points_to_Forecast=3;AggregationFunction=SUM;TimeSeriesTable=http://goo.gl/lWTvcw;Sensitivity=LOW;SSA_Window_in_Samples=12;SSA_EigenvaluesThreshold=0.7;ValueColum=fvalue
|
||||
#27 - MAPS_COMPARISON
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON&DataInputs=TimeIndex_1=0;ValuesComparisonThreshold=0.1;TimeIndex_2=0;Z=0;KThreshold=0.5;Layer_1=3fb7fd88-33d4-492d-b241-4e61299c44bb;Layer_2=3fb7fd88-33d4-492d-b241-4e61299c44bb;
|
||||
#28 - QUALITY_ANALYSIS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS&DataInputs=NegativeCasesTableKeyColumn=csquarecode;DistributionTableProbabilityColumn=probability;PositiveCasesTableKeyColumn=csquarecode;PositiveThreshold=0.8;PositiveCasesTable=http://goo.gl/8zWU7u;DistributionTableKeyColumn=csquarecode;DistributionTable=http://goo.gl/cXbg2n;NegativeThreshold=0.3;NegativeCasesTable=http://goo.gl/8zWU7u;
|
||||
#29 - DISCREPANCY_ANALYSIS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS&DataInputs=ComparisonThreshold=0.1;SecondTable=http://goo.gl/cXbg2n;FirstTable=http://goo.gl/BBk8iB;KThreshold=0.5;MaxSamples=10000;FirstTableProbabilityColumn=probability;SecondTableProbabilityColumn=probability;FirstTableCsquareColumn=csquarecode;SecondTableCsquareColumn=csquarecode
|
||||
#30 - XYEXTRACTOR
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR&DataInputs=OutputTableLabel=wps_xy_extractor;Layer=abea05ca-c9dc-43da-89d5-5fd3fa75023d;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
||||
#31 - TIMEEXTRACTION
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION&DataInputs=SamplingFreq=-1;X=28;Y=38;Z=0;Resolution=0.5;Layer=abea05ca-c9dc-43da-89d5-5fd3fa75023d;OutputTableLabel=wps_time_extr
|
||||
#32 - ZEXTRACTION
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION&DataInputs=OutputTableLabel=wps_z_extractor;Layer=abea05ca-c9dc-43da-89d5-5fd3fa75023d;Resolution=100;Y=38;TimeIndex=0;X=28
|
||||
#33 - XYEXTRACTOR_TABLE
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE&DataInputs=TimeIndex=0;Z=0;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;geoReferencedTableName=http://goo.gl/KjWYQG;valueColumn=oceanarea;XResolution=0.5;YResolution=0.5;BBox_UpperRightLong=50;BBox_LowerLeftLat=-60;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;OutputTableLabel=wps_xy_extr_table;
|
||||
#34 - TIMEEXTRACTION_TABLE
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE&DataInputs=Z=0;Resolution=0.5;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;Y=3.75;X=102.25;geoReferencedTableName=http://goo.gl/VDzpch;valueColumn=centerlong;SamplingFreq=-1;OutputTableLabel=wps_time_extr_table;
|
||||
#35 - ZEXTRACTION_TABLE
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE&DataInputs=TimeIndex=0;Resolution=1;filter= ;zColumn=centerlong;xColumn=centerlong;yColumn=centerlat;Y=0.25;X=0.25;geoReferencedTableName=http://goo.gl/VDzpch;valueColumn=oceanarea;OutputTableLabel=wps_z_table;
|
||||
#36 - HRS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS&DataInputs=PositiveCasesTable=http://goo.gl/VDzpch;NegativeCasesTable=http://goo.gl/VDzpch;OptionalCondition= ;ProjectingAreaTable=http://goo.gl/VDzpch;FeaturesColumns=depthmin|depthmax;
|
||||
#37 - ICCAT_VPA
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA&DataInputs=StartYear=1950;shortComment=no;EndYear=2013;CAAFile=http://goo.gl/eKKsM4;PCAAFile=http://goo.gl/yYFGd1;CPUEFile=http://goo.gl/EeI58B;PwaaFile=http://goo.gl/h1rVz9;waaFile=http://goo.gl/uTyQdW;nCPUE=7;CPUE_cut=1;age_plus_group=10;
|
||||
#38 - SGVM_INTERPOLATION
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION&DataInputs=headingAdjustment=0;maxspeedThr=6;minspeedThr=2;fm=0.5;margin=10;distscale=20;res=100;sigline=0.2;interval=120;equalDist=true;InputFile=http://goo.gl/i16kPw;npoints=10;method=cHs;
|
||||
#39 - CMSY
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=d7a4076c-e8c1-42fe-81e0-bdecb1e8074a&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY&DataInputs=IDsFile=http://goo.gl/9rg3qK;StocksFile=http://goo.gl/Mp2ZLY;SelectedStock=HLH_M07
|
||||
#41 - ESRI_GRID_EXTRACTION
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION
|
||||
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION&DataInputs=Layer=fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
|
@ -1,126 +0,0 @@
|
|||
GET_CAPABILITIES
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=GetCapabilities&Service=WPS&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=GetCapabilities&Service=WPS&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b
|
||||
41 - ESRI_GRID_EXTRACTION
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION&DataInputs=Layer=fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
||||
1 - DBSCAN
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;epsilon=10;min_points=1;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
2 - KMEANS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;k=3;max_runs=100;min_points=1;max_optimization_steps=10;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
3 - LOF
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF&DataInputs=FeaturesColumnNames=depthmean|sstmnmax|salinitymean;PointsClusterLabel=OccClustersTest;minimal_points_lower_bound=2;PointsTable=http://goo.gl/VDzpch;minimal_points_upper_bound=10;distance_function=euclidian distance;lof_threshold=2;
|
||||
4 - XMEANS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;min_points=1;maxIterations=100;minClusters=1;maxClusters=3;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
5 - BIONYM
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;RawTaxaNamesTable=http://goo.gl/N9e3pC;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;RawNamesColumn=species;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;OutputTableLabel=bionymwps;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;
|
||||
6 - BIONYM_LOCAL
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;SpeciesAuthorName=Gadus morhua
|
||||
7 - ABSENCE CELLS FROM AQUAMAPS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Aquamaps_HSPEC=http://goo.gl/24XrmE;Take_Randomly=true;Species_Code=Fis-30189;
|
||||
8 - HCAF_FILTER
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER&DataInputs=B_Box_Left_Lower_Lat=-17;B_Box_Right_Upper_Long=147;B_Box_Right_Upper_Lat=25;B_Box_Left_Lower_Long=89;Table_Label=wps_hcaf_filter;
|
||||
9 - MAX_ENT_NICHE_MODELLING
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING&DataInputs=LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;Z=0;Layers=fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31;TimeIndex=0;MaxIterations=100;SpeciesName=Latimeria chalumnae;DefaultPrevalence=0.5;YResolution=0.5;OccurrencesTable=http://goo.gl/5cnKKp;XResolution=0.5;OutputTableLabel=wps_maxent;
|
||||
10 - OCCURRENCE_ENRICHMENT
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT&DataInputs=OptionalFilter= ;OutputTableName=wps_enriched;FeaturesNames=temp;OccurrenceTable=http://goo.gl/ZfFcfE;LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;ScientificNameColumn=scientificname;Layers=fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31;TimeColumn=eventdate;Resolution=0.5;
|
||||
11 - PRESENCE_CELLS_GENERATION
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Species_Code=Fis-30189;
|
||||
12 - FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR&DataInputs=Resolution=5;Latitude_Column=decimallatitude;InputTable=http://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_fao_area_column;
|
||||
13 - FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT&DataInputs=Resolution=5;InputTable=http://goo.gl/yJTIBZ;Longitude_Column=centerlong;Quadrant_Column=quadrant;OutputTableName=wps_fao_quadrant;Latitude_Column=centerlat;
|
||||
14 - CSQUARE_COLUMN_CREATOR
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR&DataInputs=CSquare_Resolution=0.1;Latitude_Column=decimallatitude;InputTable=http://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_csquare_column;
|
||||
15 - GENERIC_CHARTS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS&DataInputs=Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;TopElementsNumber=10;Attributes=x|y
|
||||
16 - GEO_CHART
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART&DataInputs=Latitude=y;Quantities=x;Longitude=x;InputTable=http://goo.gl/lWTvcw
|
||||
17 - TIME_GEO_CHART
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART&DataInputs=Time=time;Latitude=x;Longitude=y;Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;
|
||||
18 - TIME_SERIES_CHARTS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS&DataInputs=Attributes=x|y|z;Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;Time=time
|
||||
19 - OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA&DataInputs=Selected species=Gadus morhua;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
20 - OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
21 - OBIS_MOST_OBSERVED_SPECIES
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES&DataInputs=Species_number=10;End_year=2015;Start_year=2000;
|
||||
22 - OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR&DataInputs=Level=GENUS;Selected taxonomy=Gadus|Merluccius;End_year=2015;Start_year=2000;
|
||||
23 - OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;
|
||||
24 - OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA&DataInputs=Species=Gadus morhua;Area=LME;End_year=2015;Start_year=2000;
|
||||
25 - OBIS_MOST_OBSERVED_TAXA
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA&DataInputs=Level=GENUS;Taxa_number=10;End_year=2015;Start_year=2000;
|
||||
26 - TIME_SERIES_ANALYSIS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS&DataInputs=FFT_Window_Samples=12;SSA_Points_to_Forecast=3;AggregationFunction=SUM;TimeSeriesTable=http://goo.gl/lWTvcw;Sensitivity=LOW;SSA_Window_in_Samples=12;SSA_EigenvaluesThreshold=0.7;ValueColum=fvalue
|
||||
27 - MAPS_COMPARISON
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON&DataInputs=TimeIndex_1=0;ValuesComparisonThreshold=0.1;TimeIndex_2=0;Z=0;KThreshold=0.5;Layer_1=3fb7fd88-33d4-492d-b241-4e61299c44bb;Layer_2=3fb7fd88-33d4-492d-b241-4e61299c44bb;
|
||||
28 - QUALITY_ANALYSIS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS&DataInputs=NegativeCasesTableKeyColumn=csquarecode;DistributionTableProbabilityColumn=probability;PositiveCasesTableKeyColumn=csquarecode;PositiveThreshold=0.8;PositiveCasesTable=http://goo.gl/8zWU7u;DistributionTableKeyColumn=csquarecode;DistributionTable=http://goo.gl/cXbg2n;NegativeThreshold=0.3;NegativeCasesTable=http://goo.gl/8zWU7u;
|
||||
29 - DISCREPANCY_ANALYSIS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS&DataInputs=ComparisonThreshold=0.1;SecondTable=http://goo.gl/cXbg2n;FirstTable=http://goo.gl/BBk8iB;KThreshold=0.5;MaxSamples=10000;FirstTableProbabilityColumn=probability;SecondTableProbabilityColumn=probability;FirstTableCsquareColumn=csquarecode;SecondTableCsquareColumn=csquarecode
|
||||
30 - XYEXTRACTOR
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR&DataInputs=OutputTableLabel=wps_xy_extractor;Layer=3fb7fd88-33d4-492d-b241-4e61299c44bb;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
||||
31 - TIMEEXTRACTION
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION&DataInputs=SamplingFreq=-1;X=28;Y=38;Z=0;Resolution=0.5;Layer=3fb7fd88-33d4-492d-b241-4e61299c44bb;OutputTableLabel=wps_time_extr
|
||||
32 - ZEXTRACTION
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION&DataInputs=OutputTableLabel=wps_z_extractor;Layer=3fb7fd88-33d4-492d-b241-4e61299c44bb;Resolution=100;Y=38;TimeIndex=0;X=28
|
||||
33 - XYEXTRACTOR_TABLE
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE&DataInputs=TimeIndex=0;Z=0;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;geoReferencedTableName=http://goo.gl/KjWYQG;valueColumn=oceanarea;XResolution=0.5;YResolution=0.5;BBox_UpperRightLong=50;BBox_LowerLeftLat=-60;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;OutputTableLabel=wps_xy_extr_table;
|
||||
34 - TIMEEXTRACTION_TABLE
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE&DataInputs=Z=0;Resolution=0.5;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;Y=3.75;X=102.25;geoReferencedTableName=http://goo.gl/VDzpch;valueColumn=centerlong;SamplingFreq=-1;OutputTableLabel=wps_time_extr_table;
|
||||
35 - ZEXTRACTION_TABLE
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE&DataInputs=TimeIndex=0;Resolution=1;filter= ;zColumn=centerlong;xColumn=centerlong;yColumn=centerlat;Y=0.25;X=0.25;geoReferencedTableName=http://goo.gl/VDzpch;valueColumn=oceanarea;OutputTableLabel=wps_z_table;
|
||||
36 - HRS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS&DataInputs=PositiveCasesTable=http://goo.gl/VDzpch;NegativeCasesTable=http://goo.gl/VDzpch;OptionalCondition= ;ProjectingAreaTable=http://goo.gl/VDzpch;FeaturesColumns=depthmin|depthmax;
|
||||
37 - ICCAT_VPA
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA&DataInputs=StartYear=1950;shortComment=no;EndYear=2013;CAAFile=http://goo.gl/eKKsM4;PCAAFile=http://goo.gl/yYFGd1;CPUEFile=http://goo.gl/EeI58B;PwaaFile=http://goo.gl/h1rVz9;waaFile=http://goo.gl/uTyQdW;nCPUE=7;CPUE_cut=1;age_plus_group=10;
|
||||
38 - SGVM_INTERPOLATION
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION&DataInputs=headingAdjustment=0;maxspeedThr=6;minspeedThr=2;fm=0.5;margin=10;distscale=20;res=100;sigline=0.2;interval=120;equalDist=true;InputFile=http://goo.gl/i16kPw;npoints=10;method=cHs;
|
||||
40 - SUBMITQUERY
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY&DataInputs=DatabaseName=fishbase;Query=select * from food limit 100;Apply Smart Correction=false;Language=POSTGRES;ResourceName=FishBase;Read-Only Query=true;
|
||||
39 - CMSY
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY
|
||||
http://dataminer1-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY&DataInputs=IDsFile=http://goo.gl/9rg3qK;StocksFile=http://goo.gl/Mp2ZLY;SelectedStock=HLH_M07
|
|
@ -1,126 +0,0 @@
|
|||
#GET_CAPABILITIES
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=GetCapabilities&Service=WPS&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=GetCapabilities&Service=WPS&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b
|
||||
1 - DBSCAN
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;epsilon=10;min_points=1;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
2 - KMEANS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;k=3;max_runs=100;min_points=1;max_optimization_steps=10;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
3 - LOF
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF&DataInputs=FeaturesColumnNames=depthmean|sstmnmax|salinitymean;PointsClusterLabel=OccClustersTest;minimal_points_lower_bound=2;PointsTable=http://goo.gl/VDzpch;minimal_points_upper_bound=10;distance_function=euclidian distance;lof_threshold=2;
|
||||
4 - XMEANS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;min_points=1;maxIterations=100;minClusters=1;maxClusters=3;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
5 - BIONYM
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;RawTaxaNamesTable=http://goo.gl/N9e3pC;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;RawNamesColumn=species;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;OutputTableLabel=bionymwps;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;
|
||||
6 - BIONYM_LOCAL
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;SpeciesAuthorName=Gadus morhua
|
||||
7 - ABSENCE CELLS FROM AQUAMAPS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Aquamaps_HSPEC=http://goo.gl/24XrmE;Take_Randomly=true;Species_Code=Fis-30189;
|
||||
8 - HCAF_FILTER
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER&DataInputs=B_Box_Left_Lower_Lat=-17;B_Box_Right_Upper_Long=147;B_Box_Right_Upper_Lat=25;B_Box_Left_Lower_Long=89;Table_Label=wps_hcaf_filter;
|
||||
9 - MAX_ENT_NICHE_MODELLING
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING&DataInputs=LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;Z=0;Layers=fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31;TimeIndex=0;MaxIterations=100;SpeciesName=Latimeria chalumnae;DefaultPrevalence=0.5;YResolution=0.5;OccurrencesTable=http://goo.gl/5cnKKp;XResolution=0.5;OutputTableLabel=wps_maxent;
|
||||
10 - OCCURRENCE_ENRICHMENT
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT&DataInputs=OptionalFilter= ;OutputTableName=wps_enriched;FeaturesNames=temp;OccurrenceTable=http://goo.gl/ZfFcfE;LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;ScientificNameColumn=scientificname;Layers=fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31;TimeColumn=eventdate;Resolution=0.5;
|
||||
11 - PRESENCE_CELLS_GENERATION
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Species_Code=Fis-30189;
|
||||
12 - FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR&DataInputs=Resolution=5;Latitude_Column=decimallatitude;InputTable=http://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_fao_area_column;
|
||||
13 - FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT&DataInputs=Resolution=5;InputTable=http://goo.gl/yJTIBZ;Longitude_Column=centerlong;Quadrant_Column=quadrant;OutputTableName=wps_fao_quadrant;Latitude_Column=centerlat;
|
||||
14 - CSQUARE_COLUMN_CREATOR
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR&DataInputs=CSquare_Resolution=0.1;Latitude_Column=decimallatitude;InputTable=http://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_csquare_column;
|
||||
15 - GENERIC_CHARTS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS&DataInputs=Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;TopElementsNumber=10;Attributes=x|y
|
||||
16 - GEO_CHART
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART&DataInputs=Latitude=y;Quantities=x;Longitude=x;InputTable=http://goo.gl/lWTvcw
|
||||
17 - TIME_GEO_CHART
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART&DataInputs=Time=time;Latitude=x;Longitude=y;Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;
|
||||
18 - TIME_SERIES_CHARTS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS&DataInputs=Attributes=x|y|z;Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;Time=time
|
||||
19 - OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA&DataInputs=Selected species=Gadus morhua;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
20 - OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
21 - OBIS_MOST_OBSERVED_SPECIES
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES&DataInputs=Species_number=10;End_year=2015;Start_year=2000;
|
||||
22 - OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR&DataInputs=Level=GENUS;Selected taxonomy=Gadus|Merluccius;End_year=2015;Start_year=2000;
|
||||
23 - OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;
|
||||
24 - OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA&DataInputs=Species=Gadus morhua;Area=LME;End_year=2015;Start_year=2000;
|
||||
25 - OBIS_MOST_OBSERVED_TAXA
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA&DataInputs=Level=GENUS;Taxa_number=10;End_year=2015;Start_year=2000;
|
||||
26 - TIME_SERIES_ANALYSIS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS&DataInputs=FFT_Window_Samples=12;SSA_Points_to_Forecast=3;AggregationFunction=SUM;TimeSeriesTable=http://goo.gl/lWTvcw;Sensitivity=LOW;SSA_Window_in_Samples=12;SSA_EigenvaluesThreshold=0.7;ValueColum=fvalue
|
||||
27 - MAPS_COMPARISON
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON&DataInputs=TimeIndex_1=0;ValuesComparisonThreshold=0.1;TimeIndex_2=0;Z=0;KThreshold=0.5;Layer_1=3fb7fd88-33d4-492d-b241-4e61299c44bb;Layer_2=3fb7fd88-33d4-492d-b241-4e61299c44bb;
|
||||
28 - QUALITY_ANALYSIS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS&DataInputs=NegativeCasesTableKeyColumn=csquarecode;DistributionTableProbabilityColumn=probability;PositiveCasesTableKeyColumn=csquarecode;PositiveThreshold=0.8;PositiveCasesTable=http://goo.gl/8zWU7u;DistributionTableKeyColumn=csquarecode;DistributionTable=http://goo.gl/cXbg2n;NegativeThreshold=0.3;NegativeCasesTable=http://goo.gl/8zWU7u;
|
||||
29 - DISCREPANCY_ANALYSIS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS&DataInputs=ComparisonThreshold=0.1;SecondTable=http://goo.gl/cXbg2n;FirstTable=http://goo.gl/BBk8iB;KThreshold=0.5;MaxSamples=10000;FirstTableProbabilityColumn=probability;SecondTableProbabilityColumn=probability;FirstTableCsquareColumn=csquarecode;SecondTableCsquareColumn=csquarecode
|
||||
30 - XYEXTRACTOR
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR&DataInputs=OutputTableLabel=wps_xy_extractor;Layer=3fb7fd88-33d4-492d-b241-4e61299c44bb;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
||||
31 - TIMEEXTRACTION
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION&DataInputs=SamplingFreq=-1;X=28;Y=38;Z=0;Resolution=0.5;Layer=3fb7fd88-33d4-492d-b241-4e61299c44bb;OutputTableLabel=wps_time_extr
|
||||
32 - ZEXTRACTION
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION&DataInputs=OutputTableLabel=wps_z_extractor;Layer=3fb7fd88-33d4-492d-b241-4e61299c44bb;Resolution=100;Y=38;TimeIndex=0;X=28
|
||||
33 - XYEXTRACTOR_TABLE
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE&DataInputs=TimeIndex=0;Z=0;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;geoReferencedTableName=http://goo.gl/KjWYQG;valueColumn=oceanarea;XResolution=0.5;YResolution=0.5;BBox_UpperRightLong=50;BBox_LowerLeftLat=-60;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;OutputTableLabel=wps_xy_extr_table;
|
||||
34 - TIMEEXTRACTION_TABLE
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE&DataInputs=Z=0;Resolution=0.5;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;Y=3.75;X=102.25;geoReferencedTableName=http://goo.gl/VDzpch;valueColumn=centerlong;SamplingFreq=-1;OutputTableLabel=wps_time_extr_table;
|
||||
35 - ZEXTRACTION_TABLE
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE&DataInputs=TimeIndex=0;Resolution=1;filter= ;zColumn=centerlong;xColumn=centerlong;yColumn=centerlat;Y=0.25;X=0.25;geoReferencedTableName=http://goo.gl/VDzpch;valueColumn=oceanarea;OutputTableLabel=wps_z_table;
|
||||
36 - HRS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS&DataInputs=PositiveCasesTable=http://goo.gl/VDzpch;NegativeCasesTable=http://goo.gl/VDzpch;OptionalCondition= ;ProjectingAreaTable=http://goo.gl/VDzpch;FeaturesColumns=depthmin|depthmax;
|
||||
37 - ICCAT_VPA
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA&DataInputs=StartYear=1950;shortComment=no;EndYear=2013;CAAFile=http://goo.gl/eKKsM4;PCAAFile=http://goo.gl/yYFGd1;CPUEFile=http://goo.gl/EeI58B;PwaaFile=http://goo.gl/h1rVz9;waaFile=http://goo.gl/uTyQdW;nCPUE=7;CPUE_cut=1;age_plus_group=10;
|
||||
38 - SGVM_INTERPOLATION
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION&DataInputs=headingAdjustment=0;maxspeedThr=6;minspeedThr=2;fm=0.5;margin=10;distscale=20;res=100;sigline=0.2;interval=120;equalDist=true;InputFile=http://goo.gl/i16kPw;npoints=10;method=cHs;
|
||||
39 - CMSY
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY&DataInputs=IDsFile=http://goo.gl/9rg3qK;StocksFile=http://goo.gl/Mp2ZLY;SelectedStock=HLH_M07
|
||||
40 - SUBMIT QUERY
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY&DataInputs=DatabaseName=fishbase;Query=select * from food limit 100;Apply Smart Correction=false;Language=POSTGRES;ResourceName=FishBase;Read-Only Query=true;
|
||||
41 - ESRI_GRID_EXTRACTION
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION
|
||||
http://dataminer2-d-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=4ccc2c35-60c9-4c9b-9800-616538d5d48b&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION&DataInputs=Layer=fc9ac2f4-a2bd-43d1-a361-ac67c5ceac31;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
|
@ -1,126 +0,0 @@
|
|||
GET_CAPABILITIES
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=GetCapabilities&Service=WPS&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=GetCapabilities&Service=WPS&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414
|
||||
41 - ESRI_GRID_EXTRACTION
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION&DataInputs=Layer=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
||||
1 - DBSCAN
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;epsilon=10;min_points=1;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
2 - KMEANS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;k=3;max_runs=100;min_points=1;max_optimization_steps=10;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
3 - LOF
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF&DataInputs=FeaturesColumnNames=depthmean|sstmnmax|salinitymean;PointsClusterLabel=OccClustersTest;minimal_points_lower_bound=2;PointsTable=http://goo.gl/VDzpch;minimal_points_upper_bound=10;distance_function=euclidian distance;lof_threshold=2;
|
||||
4 - XMEANS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;min_points=1;maxIterations=100;minClusters=1;maxClusters=3;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
5 - BIONYM
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;RawTaxaNamesTable=http://goo.gl/N9e3pC;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;RawNamesColumn=species;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;OutputTableLabel=bionymwps;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;
|
||||
6 - BIONYM_LOCAL
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;SpeciesAuthorName=Gadus morhua
|
||||
7 - ABSENCE CELLS FROM AQUAMAPS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Aquamaps_HSPEC=http://goo.gl/24XrmE;Take_Randomly=true;Species_Code=Fis-30189;
|
||||
8 - HCAF_FILTER
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER&DataInputs=B_Box_Left_Lower_Lat=-17;B_Box_Right_Upper_Long=147;B_Box_Right_Upper_Lat=25;B_Box_Left_Lower_Long=89;Table_Label=wps_hcaf_filter;
|
||||
9 - MAX_ENT_NICHE_MODELLING
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING&DataInputs=LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;Z=0;Layers=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;TimeIndex=0;MaxIterations=100;SpeciesName=Latimeria chalumnae;DefaultPrevalence=0.5;YResolution=0.5;OccurrencesTable=http://goo.gl/5cnKKp;XResolution=0.5;OutputTableLabel=wps_maxent;
|
||||
10 - OCCURRENCE_ENRICHMENT
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT&DataInputs=OptionalFilter= ;OutputTableName=wps_enriched;FeaturesNames=temp;OccurrenceTable=http://goo.gl/ZfFcfE;LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;ScientificNameColumn=scientificname;Layers=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;TimeColumn=eventdate;Resolution=0.5;
|
||||
11 - PRESENCE_CELLS_GENERATION
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Species_Code=Fis-30189;
|
||||
12 - FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR&DataInputs=Resolution=5;Latitude_Column=decimallatitude;InputTable=http://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_fao_area_column;
|
||||
13 - FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT&DataInputs=Resolution=5;InputTable=http://goo.gl/yJTIBZ;Longitude_Column=centerlong;Quadrant_Column=quadrant;OutputTableName=wps_fao_quadrant;Latitude_Column=centerlat;
|
||||
14 - CSQUARE_COLUMN_CREATOR
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR&DataInputs=CSquare_Resolution=0.1;Latitude_Column=decimallatitude;InputTable=http://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_csquare_column;
|
||||
15 - GENERIC_CHARTS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS&DataInputs=Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;TopElementsNumber=10;Attributes=x|y
|
||||
16 - GEO_CHART
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART&DataInputs=Latitude=y;Quantities=x;Longitude=x;InputTable=http://goo.gl/lWTvcw
|
||||
17 - TIME_GEO_CHART
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART&DataInputs=Time=time;Latitude=x;Longitude=y;Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;
|
||||
18 - TIME_SERIES_CHARTS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS&DataInputs=Attributes=x|y|z;Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;Time=time
|
||||
19 - OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA&DataInputs=Selected species=Gadus morhua;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
20 - OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
21 - OBIS_MOST_OBSERVED_SPECIES
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES&DataInputs=Species_number=10;End_year=2015;Start_year=2000;
|
||||
22 - OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR&DataInputs=Level=GENUS;Selected taxonomy=Gadus|Merluccius;End_year=2015;Start_year=2000;
|
||||
23 - OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;
|
||||
24 - OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA&DataInputs=Species=Gadus morhua;Area=LME;End_year=2015;Start_year=2000;
|
||||
25 - OBIS_MOST_OBSERVED_TAXA
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA&DataInputs=Level=GENUS;Taxa_number=10;End_year=2015;Start_year=2000;
|
||||
26 - TIME_SERIES_ANALYSIS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS&DataInputs=FFT_Window_Samples=12;SSA_Points_to_Forecast=3;AggregationFunction=SUM;TimeSeriesTable=http://goo.gl/lWTvcw;Sensitivity=LOW;SSA_Window_in_Samples=12;SSA_EigenvaluesThreshold=0.7;ValueColum=fvalue
|
||||
27 - MAPS_COMPARISON
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON&DataInputs=TimeIndex_1=0;ValuesComparisonThreshold=0.1;TimeIndex_2=0;Z=0;KThreshold=0.5;Layer_1=04e61cb8-3c32-47fe-823c-80ac3d417a0b;Layer_2=04e61cb8-3c32-47fe-823c-80ac3d417a0b;
|
||||
28 - QUALITY_ANALYSIS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS&DataInputs=NegativeCasesTableKeyColumn=csquarecode;DistributionTableProbabilityColumn=probability;PositiveCasesTableKeyColumn=csquarecode;PositiveThreshold=0.8;PositiveCasesTable=http://goo.gl/8zWU7u;DistributionTableKeyColumn=csquarecode;DistributionTable=http://goo.gl/cXbg2n;NegativeThreshold=0.3;NegativeCasesTable=http://goo.gl/8zWU7u;
|
||||
29 - DISCREPANCY_ANALYSIS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS&DataInputs=ComparisonThreshold=0.1;SecondTable=http://goo.gl/cXbg2n;FirstTable=http://goo.gl/BBk8iB;KThreshold=0.5;MaxSamples=10000;FirstTableProbabilityColumn=probability;SecondTableProbabilityColumn=probability;FirstTableCsquareColumn=csquarecode;SecondTableCsquareColumn=csquarecode
|
||||
30 - XYEXTRACTOR
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR&DataInputs=OutputTableLabel=wps_xy_extractor;Layer=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
||||
31 - TIMEEXTRACTION
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION&DataInputs=SamplingFreq=-1;X=28;Y=38;Z=0;Resolution=0.5;Layer=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;OutputTableLabel=wps_time_extr
|
||||
32 - ZEXTRACTION
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION&DataInputs=OutputTableLabel=wps_z_extractor;Layer=ca40c6c1-9350-4a9d-a3a0-3dd534272ce2;Resolution=100;Y=38;TimeIndex=0;X=28
|
||||
33 - XYEXTRACTOR_TABLE
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE&DataInputs=TimeIndex=0;Z=0;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;geoReferencedTableName=http://goo.gl/KjWYQG;valueColumn=oceanarea;XResolution=0.5;YResolution=0.5;BBox_UpperRightLong=50;BBox_LowerLeftLat=-60;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;OutputTableLabel=wps_xy_extr_table;
|
||||
34 - TIMEEXTRACTION_TABLE
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE&DataInputs=Z=0;Resolution=0.5;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;Y=3.75;X=102.25;geoReferencedTableName=http://goo.gl/VDzpch;valueColumn=centerlong;SamplingFreq=-1;OutputTableLabel=wps_time_extr_table;
|
||||
35 - ZEXTRACTION_TABLE
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE&DataInputs=TimeIndex=0;Resolution=1;filter= ;zColumn=centerlong;xColumn=centerlong;yColumn=centerlat;Y=0.25;X=0.25;geoReferencedTableName=http://goo.gl/VDzpch;valueColumn=oceanarea;OutputTableLabel=wps_z_table;
|
||||
36 - HRS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS&DataInputs=PositiveCasesTable=http://goo.gl/VDzpch;NegativeCasesTable=http://goo.gl/VDzpch;OptionalCondition= ;ProjectingAreaTable=http://goo.gl/VDzpch;FeaturesColumns=depthmin|depthmax;
|
||||
40 - SUBMIT QUERY
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY&DataInputs=DatabaseName=fishbase;Query=select * from food limit 100;Apply Smart Correction=false;Language=POSTGRES;ResourceName=FishBase;Read-Only Query=true;
|
||||
38 - SGVM_INTERPOLATION
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION&DataInputs=headingAdjustment=0;maxspeedThr=6;minspeedThr=2;fm=0.5;margin=10;distscale=20;res=100;sigline=0.2;interval=120;equalDist=true;InputFile=http://goo.gl/i16kPw;npoints=10;method=cHs;
|
||||
37 - ICCAT_VPA
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA&DataInputs=StartYear=1950;shortComment=no;EndYear=2013;CAAFile=http://goo.gl/eKKsM4;PCAAFile=http://goo.gl/yYFGd1;CPUEFile=http://goo.gl/EeI58B;PwaaFile=http://goo.gl/h1rVz9;waaFile=http://goo.gl/uTyQdW;nCPUE=7;CPUE_cut=1;age_plus_group=10;
|
||||
39 - CMSY
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY
|
||||
http://dataminer1-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY&DataInputs=IDsFile=http://goo.gl/9rg3qK;StocksFile=http://goo.gl/Mp2ZLY;SelectedStock=HLH_M07
|
|
@ -1,126 +0,0 @@
|
|||
GET_CAPABILITIES
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=GetCapabilities&Service=WPS&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=GetCapabilities&Service=WPS&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414
|
||||
41 - ESRI_GRID_EXTRACTION
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION&DataInputs=Layer=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
||||
1 - DBSCAN
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;epsilon=10;min_points=1;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
2 - KMEANS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;k=3;max_runs=100;min_points=1;max_optimization_steps=10;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
3 - LOF
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF&DataInputs=FeaturesColumnNames=depthmean|sstmnmax|salinitymean;PointsClusterLabel=OccClustersTest;minimal_points_lower_bound=2;PointsTable=http://goo.gl/VDzpch;minimal_points_upper_bound=10;distance_function=euclidian distance;lof_threshold=2;
|
||||
4 - XMEANS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;min_points=1;maxIterations=100;minClusters=1;maxClusters=3;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
5 - BIONYM
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;RawTaxaNamesTable=http://goo.gl/N9e3pC;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;RawNamesColumn=species;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;OutputTableLabel=bionymwps;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;
|
||||
6 - BIONYM_LOCAL
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;SpeciesAuthorName=Gadus morhua
|
||||
7 - ABSENCE_CELLS_FROM_AQUAMAPS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Aquamaps_HSPEC=http://goo.gl/24XrmE;Take_Randomly=true;Species_Code=Fis-30189;
|
||||
8 - HCAF_FILTER
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER&DataInputs=B_Box_Left_Lower_Lat=-17;B_Box_Right_Upper_Long=147;B_Box_Right_Upper_Lat=25;B_Box_Left_Lower_Long=89;Table_Label=wps_hcaf_filter;
|
||||
9 - MAX_ENT_NICHE_MODELLING
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING&DataInputs=LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;Z=0;Layers=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;TimeIndex=0;MaxIterations=100;SpeciesName=Latimeria chalumnae;DefaultPrevalence=0.5;YResolution=0.5;OccurrencesTable=http://goo.gl/5cnKKp;XResolution=0.5;OutputTableLabel=wps_maxent;
|
||||
10 - OCCURRENCE_ENRICHMENT
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT&DataInputs=OptionalFilter= ;OutputTableName=wps_enriched;FeaturesNames=temp;OccurrenceTable=http://goo.gl/ZfFcfE;LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;ScientificNameColumn=scientificname;Layers=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;TimeColumn=eventdate;Resolution=0.5;
|
||||
11 - PRESENCE_CELLS_GENERATION
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Species_Code=Fis-30189;
|
||||
12 - FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR&DataInputs=Resolution=5;Latitude_Column=decimallatitude;InputTable=http://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_fao_area_column;
|
||||
13 - FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT&DataInputs=Resolution=5;InputTable=http://goo.gl/yJTIBZ;Longitude_Column=centerlong;Quadrant_Column=quadrant;OutputTableName=wps_fao_quadrant;Latitude_Column=centerlat;
|
||||
14 - CSQUARE_COLUMN_CREATOR
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR&DataInputs=CSquare_Resolution=0.1;Latitude_Column=decimallatitude;InputTable=http://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_csquare_column;
|
||||
15 - GENERIC_CHARTS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS&DataInputs=Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;TopElementsNumber=10;Attributes=x|y
|
||||
16 - GEO_CHART
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART&DataInputs=Latitude=y;Quantities=x;Longitude=x;InputTable=http://goo.gl/lWTvcw
|
||||
17 - TIME_GEO_CHART
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART&DataInputs=Time=time;Latitude=x;Longitude=y;Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;
|
||||
18 - TIME_SERIES_CHARTS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS&DataInputs=Attributes=x|y|z;Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;Time=time
|
||||
19 - OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA&DataInputs=Selected species=Gadus morhua;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
20 - OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
21 - OBIS_MOST_OBSERVED_SPECIES
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES&DataInputs=Species_number=10;End_year=2015;Start_year=2000;
|
||||
22 - OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR&DataInputs=Level=GENUS;Selected taxonomy=Gadus|Merluccius;End_year=2015;Start_year=2000;
|
||||
23 - OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;
|
||||
24 - OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA&DataInputs=Species=Gadus morhua;Area=LME;End_year=2015;Start_year=2000;
|
||||
25 - OBIS_MOST_OBSERVED_TAXA
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA&DataInputs=Level=GENUS;Taxa_number=10;End_year=2015;Start_year=2000;
|
||||
26 - TIME_SERIES_ANALYSIS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS&DataInputs=FFT_Window_Samples=12;SSA_Points_to_Forecast=3;AggregationFunction=SUM;TimeSeriesTable=http://goo.gl/lWTvcw;Sensitivity=LOW;SSA_Window_in_Samples=12;SSA_EigenvaluesThreshold=0.7;ValueColum=fvalue
|
||||
27 - MAPS_COMPARISON
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON&DataInputs=TimeIndex_1=0;ValuesComparisonThreshold=0.1;TimeIndex_2=0;Z=0;KThreshold=0.5;Layer_1=04e61cb8-3c32-47fe-823c-80ac3d417a0b;Layer_2=04e61cb8-3c32-47fe-823c-80ac3d417a0b;
|
||||
28 - QUALITY_ANALYSIS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS&DataInputs=NegativeCasesTableKeyColumn=csquarecode;DistributionTableProbabilityColumn=probability;PositiveCasesTableKeyColumn=csquarecode;PositiveThreshold=0.8;PositiveCasesTable=http://goo.gl/8zWU7u;DistributionTableKeyColumn=csquarecode;DistributionTable=http://goo.gl/cXbg2n;NegativeThreshold=0.3;NegativeCasesTable=http://goo.gl/8zWU7u;
|
||||
29 - DISCREPANCY_ANALYSIS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS&DataInputs=ComparisonThreshold=0.1;SecondTable=http://goo.gl/cXbg2n;FirstTable=http://goo.gl/BBk8iB;KThreshold=0.5;MaxSamples=10000;FirstTableProbabilityColumn=probability;SecondTableProbabilityColumn=probability;FirstTableCsquareColumn=csquarecode;SecondTableCsquareColumn=csquarecode
|
||||
30 - XYEXTRACTOR
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR&DataInputs=OutputTableLabel=wps_xy_extractor;Layer=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
||||
31 - TIMEEXTRACTION
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION&DataInputs=SamplingFreq=-1;X=28;Y=38;Z=0;Resolution=0.5;Layer=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;OutputTableLabel=wps_time_extr
|
||||
32 - ZEXTRACTION
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION&DataInputs=OutputTableLabel=wps_z_extractor;Layer=ca40c6c1-9350-4a9d-a3a0-3dd534272ce2;Resolution=100;Y=38;TimeIndex=0;X=28
|
||||
33 - XYEXTRACTOR_TABLE
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE&DataInputs=TimeIndex=0;Z=0;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;geoReferencedTableName=http://goo.gl/KjWYQG;valueColumn=oceanarea;XResolution=0.5;YResolution=0.5;BBox_UpperRightLong=50;BBox_LowerLeftLat=-60;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;OutputTableLabel=wps_xy_extr_table;
|
||||
34 - TIMEEXTRACTION_TABLE
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE&DataInputs=Z=0;Resolution=0.5;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;Y=3.75;X=102.25;geoReferencedTableName=http://goo.gl/VDzpch;valueColumn=centerlong;SamplingFreq=-1;OutputTableLabel=wps_time_extr_table;
|
||||
35 - ZEXTRACTION_TABLE
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE&DataInputs=TimeIndex=0;Resolution=1;filter= ;zColumn=centerlong;xColumn=centerlong;yColumn=centerlat;Y=0.25;X=0.25;geoReferencedTableName=http://goo.gl/VDzpch;valueColumn=oceanarea;OutputTableLabel=wps_z_table;
|
||||
36 - HRS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS&DataInputs=PositiveCasesTable=http://goo.gl/VDzpch;NegativeCasesTable=http://goo.gl/VDzpch;OptionalCondition= ;ProjectingAreaTable=http://goo.gl/VDzpch;FeaturesColumns=depthmin|depthmax;
|
||||
40 - SUBMITQUERY
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY&DataInputs=DatabaseName=fishbase;Query=select * from food limit 100;Apply Smart Correction=false;Language=POSTGRES;ResourceName=FishBase;Read-Only Query=true;
|
||||
38 - SGVM_INTERPOLATION
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION&DataInputs=headingAdjustment=0;maxspeedThr=6;minspeedThr=2;fm=0.5;margin=10;distscale=20;res=100;sigline=0.2;interval=120;equalDist=true;InputFile=http://goo.gl/i16kPw;npoints=10;method=cHs;
|
||||
37 - ICCAT_VPA
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA&DataInputs=StartYear=1950;shortComment=no;EndYear=2013;CAAFile=http://goo.gl/eKKsM4;PCAAFile=http://goo.gl/yYFGd1;CPUEFile=http://goo.gl/EeI58B;PwaaFile=http://goo.gl/h1rVz9;waaFile=http://goo.gl/uTyQdW;nCPUE=7;CPUE_cut=1;age_plus_group=10;
|
||||
39 - CMSY
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY
|
||||
http://dataminer2-p-d4s.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY&DataInputs=IDsFile=http://goo.gl/9rg3qK;StocksFile=http://goo.gl/Mp2ZLY;SelectedStock=HLH_M07
|
|
@ -1,117 +0,0 @@
|
|||
1 - DBSCAN
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;epsilon=10;min_points=1;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
2 - KMEANS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;k=3;max_runs=100;min_points=1;max_optimization_steps=10;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
3 - LOF
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF&DataInputs=FeaturesColumnNames=depthmean|sstmnmax|salinitymean;PointsClusterLabel=OccClustersTest;minimal_points_lower_bound=2;PointsTable=http://goo.gl/VDzpch;minimal_points_upper_bound=10;distance_function=euclidian distance;lof_threshold=2;
|
||||
4 - XMEANS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS&DataInputs=OccurrencePointsClusterLabel=OccClustersTest;min_points=1;maxIterations=100;minClusters=1;maxClusters=3;OccurrencePointsTable=http://goo.gl/VDzpch;FeaturesColumnNames=depthmean|sstmnmax|salinitymean;
|
||||
5 - BIONYM
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;RawTaxaNamesTable=http://goo.gl/N9e3pC;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;RawNamesColumn=species;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;OutputTableLabel=bionymwps;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;
|
||||
6 - BIONYM_LOCAL
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL&DataInputs=Matcher_1=LEVENSHTEIN;Matcher_4=NONE;Matcher_5=NONE;Matcher_2=NONE;Matcher_3=NONE;Threshold_1=0.6;Threshold_2=0.6;Accuracy_vs_Speed=MAX_ACCURACY;MaxResults_2=10;MaxResults_1=10;Threshold_3=0.4;Taxa_Authority_File=FISHBASE;Parser_Name=SIMPLE;MaxResults_4=0;Threshold_4=0;MaxResults_3=0;MaxResults_5=0;Threshold_5=0;Use_Stemmed_Genus_and_Species=false;Activate_Preparsing_Processing=true;SpeciesAuthorName=Gadus morhua
|
||||
7 - ABSENCE CELLS FROM AQUAMAPS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Aquamaps_HSPEC=http://goo.gl/24XrmE;Take_Randomly=true;Species_Code=Fis-30189;
|
||||
8 - HCAF_FILTER
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER&DataInputs=B_Box_Left_Lower_Lat=-17;B_Box_Right_Upper_Long=147;B_Box_Right_Upper_Lat=25;B_Box_Left_Lower_Long=89;Table_Label=wps_hcaf_filter;
|
||||
9 - MAX_ENT_NICHE_MODELLING
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING&DataInputs=LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;Z=0;Layers=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff|9be60051-bc99-4a2b-b99d-a681cba411d7;TimeIndex=0;MaxIterations=100;SpeciesName=Latimeria chalumnae;DefaultPrevalence=0.5;YResolution=0.5;OccurrencesTable=http://goo.gl/5cnKKp;XResolution=0.5;OutputTableLabel=wps_maxent;
|
||||
10 - OCCURRENCE_ENRICHMENT
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT&DataInputs=OptionalFilter= ;OutputTableName=wps_enriched;FeaturesNames=temp;OccurrenceTable=http://goo.gl/ZfFcfE;LongitudeColumn=decimallongitude;LatitudeColumn=decimallatitude;ScientificNameColumn=scientificname;Layers=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;TimeColumn=eventdate;Resolution=0.5;
|
||||
11 - PRESENCE_CELLS_GENERATION
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION&DataInputs=Number_of_Points=20;Table_Label=hcaf_filtered_wps;Species_Code=Fis-30189;
|
||||
12 - FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR&DataInputs=Resolution=5;Latitude_Column=decimallatitude;InputTable=http://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_fao_area_column;
|
||||
13 - FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT&DataInputs=Resolution=5;InputTable=http://goo.gl/yJTIBZ;Longitude_Column=centerlong;Quadrant_Column=quadrant;OutputTableName=wps_fao_quadrant;Latitude_Column=centerlat;
|
||||
14 - CSQUARE_COLUMN_CREATOR
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR&DataInputs=CSquare_Resolution=0.1;Latitude_Column=decimallatitude;InputTable=http://goo.gl/sdlD5a;Longitude_Column=decimallongitude;OutputTableName=wps_csquare_column;
|
||||
15 - GENERIC_CHARTS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS&DataInputs=Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;TopElementsNumber=10;Attributes=x|y
|
||||
16 - GEO_CHART
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART&DataInputs=Latitude=y;Quantities=x;Longitude=x;InputTable=http://goo.gl/lWTvcw
|
||||
17 - TIME_GEO_CHART
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART&DataInputs=Time=time;Latitude=x;Longitude=y;Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;
|
||||
18 - TIME_SERIES_CHARTS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS&DataInputs=Attributes=x|y|z;Quantities=fvalue;InputTable=http://goo.gl/lWTvcw;Time=time
|
||||
19 - OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA&DataInputs=Selected species=Gadus morhua;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
20 - OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;Area_type=NORTH SEA;
|
||||
21 - OBIS_MOST_OBSERVED_SPECIES
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES&DataInputs=Species_number=10;End_year=2015;Start_year=2000;
|
||||
22 - OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR&DataInputs=Level=GENUS;Selected taxonomy=Gadus|Merluccius;End_year=2015;Start_year=2000;
|
||||
23 - OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR&DataInputs=Selected species=Gadus morhua|Merluccius merluccius;End_year=2015;Start_year=2000;
|
||||
24 - OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA&DataInputs=Species=Gadus morhua;Area=LME;End_year=2015;Start_year=2000;
|
||||
25 - OBIS_MOST_OBSERVED_TAXA
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA&DataInputs=Level=GENUS;Taxa_number=10;End_year=2015;Start_year=2000;
|
||||
26 - TIME_SERIES_ANALYSIS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS&DataInputs=FFT_Window_Samples=12;SSA_Points_to_Forecast=3;AggregationFunction=SUM;TimeSeriesTable=http://goo.gl/lWTvcw;Sensitivity=LOW;SSA_Window_in_Samples=12;SSA_EigenvaluesThreshold=0.7;ValueColum=fvalue
|
||||
27 - MAPS_COMPARISON
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON&DataInputs=TimeIndex_1=0;ValuesComparisonThreshold=0.1;TimeIndex_2=0;Z=0;KThreshold=0.5;Layer_1=04e61cb8-3c32-47fe-823c-80ac3d417a0b;Layer_2=04e61cb8-3c32-47fe-823c-80ac3d417a0b;
|
||||
28 - QUALITY_ANALYSIS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS&DataInputs=NegativeCasesTableKeyColumn=csquarecode;DistributionTableProbabilityColumn=probability;PositiveCasesTableKeyColumn=csquarecode;PositiveThreshold=0.8;PositiveCasesTable=http://goo.gl/8zWU7u;DistributionTableKeyColumn=csquarecode;DistributionTable=http://goo.gl/cXbg2n;NegativeThreshold=0.3;NegativeCasesTable=http://goo.gl/8zWU7u;
|
||||
29 - DISCREPANCY_ANALYSIS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS&DataInputs=ComparisonThreshold=0.1;SecondTable=http://goo.gl/cXbg2n;FirstTable=http://goo.gl/BBk8iB;KThreshold=0.5;MaxSamples=10000;FirstTableProbabilityColumn=probability;SecondTableProbabilityColumn=probability;FirstTableCsquareColumn=csquarecode;SecondTableCsquareColumn=csquarecode
|
||||
30 - XYEXTRACTOR
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR&DataInputs=OutputTableLabel=wps_xy_extractor;Layer=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;YResolution=0.5;XResolution=0.5;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;BBox_LowerLeftLat=-60;BBox_UpperRightLong=50;Z=0;TimeIndex=0;
|
||||
31 - TIMEEXTRACTION
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION&DataInputs=SamplingFreq=-1;X=28;Y=38;Z=0;Resolution=0.5;Layer=cd048cb5-dbb6-414b-a3b9-1f3ac512fbff;OutputTableLabel=wps_time_extr
|
||||
32 - ZEXTRACTION
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION&DataInputs=OutputTableLabel=wps_z_extractor;Layer=dffa504b-dbc8-4553-896e-002549f8f5d3;Resolution=100;Y=38;TimeIndex=0;X=28
|
||||
33 - XYEXTRACTOR_TABLE
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE&DataInputs=TimeIndex=0;Z=0;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;geoReferencedTableName=http://goo.gl/KjWYQG;valueColumn=oceanarea;XResolution=0.5;YResolution=0.5;BBox_UpperRightLong=50;BBox_LowerLeftLat=-60;BBox_LowerLeftLong=-50;BBox_UpperRightLat=60;OutputTableLabel=wps_xy_extr_table;
|
||||
34 - TIMEEXTRACTION_TABLE
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE&DataInputs=Z=0;Resolution=0.5;filter= ;zColumn=lme;timeColumn=lme;xColumn=centerlong;yColumn=centerlat;Y=3.75;X=102.25;geoReferencedTableName=http://goo.gl/VDzpch;valueColumn=centerlong;SamplingFreq=-1;OutputTableLabel=wps_time_extr_table;
|
||||
35 - ZEXTRACTION_TABLE
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE&DataInputs=TimeIndex=0;Resolution=1;filter= ;zColumn=centerlong;xColumn=centerlong;yColumn=centerlat;Y=0.25;X=0.25;geoReferencedTableName=http://goo.gl/VDzpch;valueColumn=oceanarea;OutputTableLabel=wps_z_table;
|
||||
36 - HRS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS&DataInputs=PositiveCasesTable=http://goo.gl/VDzpch;NegativeCasesTable=http://goo.gl/VDzpch;OptionalCondition= ;ProjectingAreaTable=http://goo.gl/VDzpch;FeaturesColumns=depthmin|depthmax;
|
||||
37 - ICCAT_VPA
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA&DataInputs=StartYear=1950;shortComment=no;EndYear=2013;CAAFile=http://goo.gl/3X3b8T;PCAAFile=http://goo.gl/l5O75s;CPUEFile=http://goo.gl/vGEbuZ;PwaaFile=http://goo.gl/Y0FM5g;waaFile=http://goo.gl/50U7hG;nCPUE=7;CPUE_cut=1;age_plus_group=10;
|
||||
38 - SGVM_INTERPOLATION
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION&DataInputs=headingAdjustment=0;maxspeedThr=6;minspeedThr=2;fm=0.5;margin=10;distscale=20;res=100;sigline=0.2;interval=120;equalDist=true;InputFile=http://goo.gl/i16kPw;npoints=10;method=cHs;
|
||||
39 - CMSY
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY
|
||||
http://dataminer.d4science.org/wps/WebProcessingService?request=Execute&service=WPS&Version=1.0.0&gcube-token=aa291a2b-793c-4e5f-a759-044bd50cd414&lang=en-US&Identifier=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY&DataInputs=IDsFile=http://goo.gl/9rg3qK;StocksFile=http://goo.gl/Mp2ZLY;SelectedStock=HLH_M07
|
|
@ -1,6 +0,0 @@
|
|||
gCube System - License
|
||||
------------------------------------------------------------
|
||||
|
||||
The gCube/gCore software is licensed as Free Open Source software conveying to the EUPL (http://ec.europa.eu/idabc/eupl).
|
||||
The software and documentation is provided by its authors/distributors "as is" and no expressed or
|
||||
implied warranty is given for its use, quality or fitness for a particular case.
|
|
@ -1 +0,0 @@
|
|||
Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" CNR, Pisa IT
|
|
@ -1,80 +0,0 @@
|
|||
The gCube System - dataminer
|
||||
--------------------------------------------------
|
||||
|
||||
An e-Infrastructure service providing state-of-the art DataMining algorithms and ecological modelling approaches under the Web Processing Service (WPS) standard.
|
||||
|
||||
|
||||
This software is part of the gCube Framework (https://www.gcube-system.org/): an
|
||||
open-source software toolkit used for building and operating Hybrid Data
|
||||
Infrastructures enabling the dynamic deployment of Virtual Research Environments
|
||||
by favouring the realisation of reuse oriented policies.
|
||||
|
||||
The projects leading to this software have received funding from a series of
|
||||
European Union programmes including:
|
||||
* the Sixth Framework Programme for Research and Technological Development -
|
||||
DILIGENT (grant no. 004260);
|
||||
* the Seventh Framework Programme for research, technological development and
|
||||
demonstration - D4Science (grant no. 212488), D4Science-II (grant no.
|
||||
239019),ENVRI (grant no. 283465), EUBrazilOpenBio (grant no. 288754), iMarine
|
||||
(grant no. 283644);
|
||||
* the H2020 research and innovation programme - BlueBRIDGE (grant no. 675680),
|
||||
EGIEngage (grant no. 654142), ENVRIplus (grant no. 654182), Parthenos (grant
|
||||
no. 654119), SoBigData (grant no. 654024);
|
||||
|
||||
|
||||
Version
|
||||
--------------------------------------------------
|
||||
|
||||
1.1.0-SNAPSHOT (2016-03-10)
|
||||
|
||||
Please see the file named "changelog.xml" in this directory for the release notes.
|
||||
|
||||
|
||||
Authors
|
||||
--------------------------------------------------
|
||||
|
||||
* Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" CNR, Pisa IT
|
||||
|
||||
|
||||
Maintainers
|
||||
-----------
|
||||
|
||||
* Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" CNR, Pisa IT
|
||||
|
||||
|
||||
Download information
|
||||
--------------------------------------------------
|
||||
|
||||
Source code is available from SVN:
|
||||
https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/DataMiner
|
||||
|
||||
Binaries can be downloaded from the gCube website:
|
||||
https://www.gcube-system.org/
|
||||
|
||||
|
||||
Installation
|
||||
--------------------------------------------------
|
||||
|
||||
Installation documentation is available on-line in the gCube Wiki:
|
||||
|
||||
|
||||
|
||||
Documentation
|
||||
--------------------------------------------------
|
||||
|
||||
Documentation is available on-line in the gCube Wiki:
|
||||
|
||||
https://wiki.gcube-system.org/gcube/DataMiner_Installation
|
||||
|
||||
|
||||
Support
|
||||
--------------------------------------------------
|
||||
|
||||
Bugs and support requests can be reported in the gCube issue tracking tool:
|
||||
https://support.d4science.org/projects/gcube/
|
||||
|
||||
|
||||
Licensing
|
||||
--------------------------------------------------
|
||||
|
||||
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.
|
|
@ -1,5 +0,0 @@
|
|||
<ReleaseNotes>
|
||||
<Changeset component="org.gcube.dataanalysis.dataminer.1-1-0" date="2016-10-03">
|
||||
<Change>First Release</Change>
|
||||
</Changeset>
|
||||
</ReleaseNotes>
|
|
@ -1,41 +0,0 @@
|
|||
<assembly
|
||||
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
|
||||
<id>servicearchive</id>
|
||||
<formats>
|
||||
<format>tar.gz</format>
|
||||
</formats>
|
||||
<baseDirectory>/</baseDirectory>
|
||||
<fileSets>
|
||||
<fileSet>
|
||||
<directory>distro</directory>
|
||||
<outputDirectory>/</outputDirectory>
|
||||
<useDefaultExcludes>true</useDefaultExcludes>
|
||||
<includes>
|
||||
<include>README</include>
|
||||
<include>LICENSE</include>
|
||||
<include>INSTALL</include>
|
||||
<include>MAINTAINERS</include>
|
||||
<include>changelog.xml</include>
|
||||
</includes>
|
||||
<fileMode>755</fileMode>
|
||||
<filtered>true</filtered>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
<files>
|
||||
<file>
|
||||
<source>distro/svnpath.txt</source>
|
||||
<outputDirectory>/dataminer-1.1.0-SNAPSHOT</outputDirectory>
|
||||
</file>
|
||||
<file>
|
||||
<source>distro/profile.xml</source>
|
||||
<outputDirectory>/</outputDirectory>
|
||||
</file>
|
||||
<file>
|
||||
<source>target/dataminer-1.1.0-SNAPSHOT.jar</source>
|
||||
<outputDirectory>/dataminer-1.1.0-SNAPSHOT</outputDirectory>
|
||||
</file>
|
||||
|
||||
</files>
|
||||
</assembly>
|
|
@ -1,29 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Resource>
|
||||
<ID></ID>
|
||||
<Type>Service</Type>
|
||||
<Profile>
|
||||
<Description>An e-Infrastructure service providing state-of-the art DataMining algorithms and ecological modelling approaches under the Web Processing Service (WPS) standard.</Description>
|
||||
<Class>DataAnalysis</Class>
|
||||
<Name>dataminer</Name>
|
||||
<Version>1.0.0</Version>
|
||||
<Packages>
|
||||
<Software>
|
||||
<Description>An e-Infrastructure service providing state-of-the art DataMining algorithms and ecological modelling approaches under the Web Processing Service (WPS) standard.</Description>
|
||||
<Name>dataminer</Name>
|
||||
<Version>1.1.0-SNAPSHOT</Version>
|
||||
<MavenCoordinates>
|
||||
<groupId>org.gcube.dataanalysis</groupId>
|
||||
<artifactId>dataminer</artifactId>
|
||||
<version>1.1.0-SNAPSHOT</version>
|
||||
</MavenCoordinates>
|
||||
<Type>Service</Type>
|
||||
<Files>
|
||||
<File>dataminer-1.1.0-SNAPSHOT.jar</File>
|
||||
</Files>
|
||||
</Software>
|
||||
</Packages>
|
||||
</Profile>
|
||||
</Resource>
|
||||
|
||||
|
|
@ -1 +0,0 @@
|
|||
https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/DataMiner
|
372
pom.xml
372
pom.xml
|
@ -1,372 +0,0 @@
|
|||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<artifactId>maven-parent</artifactId>
|
||||
<groupId>org.gcube.tools</groupId>
|
||||
<version>1.0.0</version>
|
||||
<relativePath />
|
||||
</parent>
|
||||
<groupId>org.gcube.dataanalysis</groupId>
|
||||
<artifactId>dataminer</artifactId>
|
||||
<version>1.2.0-SNAPSHOT</version>
|
||||
<name>dataminer</name>
|
||||
<description>An e-Infrastructure service providing state-of-the art DataMining algorithms and ecological modelling approaches under the Web Processing Service (WPS) standard.</description>
|
||||
<scm>
|
||||
<url>https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/DataMiner</url>
|
||||
</scm>
|
||||
<developers>
|
||||
<developer>
|
||||
<name>Gianpaolo Coro</name>
|
||||
<email>gianpaolo.coro@isti.cnr.it</email>
|
||||
<organization>CNR Pisa, Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"</organization>
|
||||
<roles>
|
||||
<role>architect</role>
|
||||
<role>developer</role>
|
||||
</roles>
|
||||
</developer>
|
||||
</developers>
|
||||
<properties>
|
||||
<distroDirectory>distro</distroDirectory>
|
||||
<release.date>2016-03-10</release.date>
|
||||
<author0>Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), Istituto di
|
||||
Scienza e Tecnologie dell'Informazione "A. Faedo" CNR, Pisa IT</author0>
|
||||
<mantainer0>Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), Istituto di
|
||||
Scienza e Tecnologie dell'Informazione "A. Faedo" CNR, Pisa IT</mantainer0>
|
||||
<wikiUser></wikiUser>
|
||||
<wikiDeveloper>https://wiki.gcube-system.org/gcube/DataMiner_Installation</wikiDeveloper>
|
||||
<wikiAdmin></wikiAdmin>
|
||||
<templatesDirectory>templates</templatesDirectory>
|
||||
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
|
||||
</properties>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.gcube.dataaccess.algorithms</groupId>
|
||||
<artifactId>database-rm-algorithms</artifactId>
|
||||
<version>[1.0.0-SNAPSHOT,1.4.0-SNAPSHOT)</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>common-encryption</artifactId>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>common-scope-maps</artifactId>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-classic</artifactId>
|
||||
<version>1.1.2</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.n52.wps</groupId>
|
||||
<artifactId>52n-wps-io</artifactId>
|
||||
<version>[3.3.1,4.0.0)</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.n52.wps</groupId>
|
||||
<artifactId>52n-wps-io-impl</artifactId>
|
||||
<version>[3.3.1,4.0.0)</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.n52.wps</groupId>
|
||||
<artifactId>52n-wps-algorithm</artifactId>
|
||||
<version>[3.3.1,4.0.0)</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.n52.wps</groupId>
|
||||
<artifactId>52n-wps-server</artifactId>
|
||||
<version>[3.3.1,4.0.0)</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<!-- <dependency> <groupId>org.gcube.dataanalysis</groupId> <artifactId>ecological-engine</artifactId>
|
||||
<version>[1.8.5-SNAPSHOT,2.0.0-SNAPSHOT)</version> <exclusions> <exclusion>
|
||||
<artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion>
|
||||
</exclusions> </dependency> -->
|
||||
<dependency>
|
||||
<groupId>org.gcube.dataanalysis</groupId>
|
||||
<artifactId>ecological-engine-wps-extension</artifactId>
|
||||
<version>[1.0.2-SNAPSHOT,2.0.0-SNAPSHOT)</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.dataanalysis</groupId>
|
||||
<artifactId>ecological-engine-geospatial-extensions</artifactId>
|
||||
<version>[1.3.2-SNAPSHOT,2.0.0-SNAPSHOT)</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>common-scope-maps</artifactId>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.dataanalysis</groupId>
|
||||
<artifactId>ecological-engine-external-algorithms</artifactId>
|
||||
<version>[1.1.5-SNAPSHOT,2.0.0-SNAPSHOT)</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>common-utils-encryption</artifactId>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>common-encryption</artifactId>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
</exclusion>
|
||||
<!-- <exclusion> <artifactId>accounting-lib</artifactId> <groupId>org.gcube.accounting</groupId>
|
||||
</exclusion> -->
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.dataanalysis</groupId>
|
||||
<artifactId>ecological-engine-smart-executor</artifactId>
|
||||
<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
<!-- ><exclusion> <artifactId>common-encryption</artifactId> <groupId>org.gcube.core</groupId>
|
||||
</exclusion> -->
|
||||
<exclusion>
|
||||
<artifactId>common-scope-maps</artifactId>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-nop</artifactId>
|
||||
<version>1.7.10</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<version>4.11</version>
|
||||
<!-- <scope>test</scope> -->
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.common</groupId>
|
||||
<artifactId>common-authorization</artifactId>
|
||||
<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>common-utils-encryption</artifactId>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
<artifactId>common-scope</artifactId>
|
||||
<version>[1.2.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
|
||||
<!-- <scope>provided</scope>-->
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
<!--<exclusion>
|
||||
<artifactId>common-scope-maps</artifactId>
|
||||
<groupId>org.gcube.core</groupId>
|
||||
</exclusion>-->
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javassist</groupId>
|
||||
<artifactId>javassist</artifactId>
|
||||
<version>3.12.1.GA</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.common</groupId>
|
||||
<artifactId>home-library-jcr</artifactId>
|
||||
<version>[2.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.gcube.common</groupId>
|
||||
<artifactId>home-library</artifactId>
|
||||
<version>[2.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<build>
|
||||
<plugins>
|
||||
<!-- Maven Resources Plugin -->
|
||||
<plugin>
|
||||
<artifactId>maven-resources-plugin</artifactId>
|
||||
<version>2.6</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>copy-distro-resources</id>
|
||||
<phase>process-resources</phase>
|
||||
<goals>
|
||||
<goal>copy-resources</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<outputDirectory>${distroDirectory}</outputDirectory>
|
||||
<resources>
|
||||
<resource>
|
||||
<directory>${templatesDirectory}</directory>
|
||||
<!-- <excludes> <exclude>profile.xml</exclude> <exclude>descriptor.xml</exclude>
|
||||
</excludes> -->
|
||||
<filtering>true</filtering>
|
||||
</resource>
|
||||
</resources>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<artifactId>maven-clean-plugin</artifactId>
|
||||
<version>2.5</version>
|
||||
<configuration>
|
||||
<filesets>
|
||||
<fileset>
|
||||
<directory>${distroDirectory}</directory>
|
||||
<includes>
|
||||
<include>**</include>
|
||||
</includes>
|
||||
<followSymlinks>false</followSymlinks>
|
||||
</fileset>
|
||||
<fileset>
|
||||
<directory>${configDirectory}</directory>
|
||||
<includes>
|
||||
<include>**</include>
|
||||
</includes>
|
||||
<followSymlinks>false</followSymlinks>
|
||||
</fileset>
|
||||
</filesets>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<version>3.1</version>
|
||||
<configuration>
|
||||
<source>1.6</source>
|
||||
<target>1.6</target>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<version>2.18.1</version>
|
||||
<configuration>
|
||||
<skipTests>true</skipTests>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-resources-plugin</artifactId>
|
||||
<version>2.7</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>copy-profile</id>
|
||||
<phase>install</phase>
|
||||
<goals>
|
||||
<goal>copy-resources</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<outputDirectory>target</outputDirectory>
|
||||
<resources>
|
||||
<resource>
|
||||
<directory>${distroDirectory}</directory>
|
||||
<filtering>true</filtering>
|
||||
<includes>
|
||||
<include>profile.xml</include>
|
||||
</includes>
|
||||
</resource>
|
||||
</resources>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
<version>2.5.3</version>
|
||||
<configuration>
|
||||
<descriptors>
|
||||
<descriptor>${distroDirectory}/descriptor.xml</descriptor>
|
||||
</descriptors>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>servicearchive</id>
|
||||
<phase>install</phase>
|
||||
<goals>
|
||||
<goal>single</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>n52-releases</id>
|
||||
<name>52n Releases</name>
|
||||
<url>http://52north.org/maven/repo/releases</url>
|
||||
<releases>
|
||||
<enabled>true</enabled>
|
||||
</releases>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
</repository>
|
||||
</repositories>
|
||||
</project>
|
|
@ -1,28 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import org.n52.wps.io.data.GenericFileData;
|
||||
import org.n52.wps.io.data.IComplexData;
|
||||
|
||||
public class CsvFileDataBinding implements IComplexData {
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = 625383192227478620L;
|
||||
protected GenericFileData payload;
|
||||
|
||||
public CsvFileDataBinding(GenericFileData fileData){
|
||||
this.payload = fileData;
|
||||
}
|
||||
|
||||
public GenericFileData getPayload() {
|
||||
return payload;
|
||||
}
|
||||
|
||||
public Class<String> getSupportedClass() {
|
||||
return String.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dispose(){
|
||||
}
|
||||
}
|
|
@ -1,26 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
|
||||
import org.n52.wps.io.data.IData;
|
||||
import org.n52.wps.io.datahandler.generator.GenericFileGenerator;
|
||||
|
||||
public class CsvFileGenerator extends GenericFileGenerator {
|
||||
|
||||
public CsvFileGenerator (){
|
||||
super();
|
||||
supportedIDataTypes.add(CsvFileDataBinding.class);
|
||||
}
|
||||
|
||||
public InputStream generateStream(IData data, String mimeType, String schema) throws IOException {
|
||||
|
||||
// InputStream theStream = new ByteArrayInputStream(((CsvFileDataBinding)data).getPayload().getBytes());
|
||||
InputStream theStream = ((CsvFileDataBinding)data).getPayload().getDataStream();
|
||||
// InputStream theStream = new URL(((CsvFileDataBinding)data).getPayload()).openStream();
|
||||
|
||||
return theStream;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,28 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import org.n52.wps.io.data.GenericFileData;
|
||||
import org.n52.wps.io.data.IComplexData;
|
||||
|
||||
public class D4ScienceDataInputBinding implements IComplexData {
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = 625383192227478620L;
|
||||
protected GenericFileData payload;
|
||||
|
||||
public D4ScienceDataInputBinding(GenericFileData fileData){
|
||||
this.payload = fileData;
|
||||
}
|
||||
|
||||
public GenericFileData getPayload() {
|
||||
return payload;
|
||||
}
|
||||
|
||||
public Class<GenericFileData> getSupportedClass() {
|
||||
return GenericFileData.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dispose(){
|
||||
}
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import org.n52.wps.io.data.GenericFileData;
|
||||
import org.n52.wps.io.data.IComplexData;
|
||||
|
||||
public class D4ScienceFileDataBinding implements IComplexData {
|
||||
/**
|
||||
*
|
||||
*/
|
||||
//DONE manage input binary files with a parser
|
||||
private static final long serialVersionUID = 625383192227478620L;
|
||||
protected GenericFileData payload;
|
||||
|
||||
public D4ScienceFileDataBinding(GenericFileData fileData){
|
||||
this.payload = fileData;
|
||||
}
|
||||
|
||||
public GenericFileData getPayload() {
|
||||
return payload;
|
||||
}
|
||||
|
||||
public Class<String> getSupportedClass() {
|
||||
return String.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dispose(){
|
||||
}
|
||||
}
|
|
@ -1,27 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
|
||||
import org.fusesource.hawtbuf.ByteArrayInputStream;
|
||||
import org.n52.wps.io.data.IData;
|
||||
import org.n52.wps.io.datahandler.generator.GenericFileGenerator;
|
||||
|
||||
public class D4ScienceFileGenerator extends GenericFileGenerator {
|
||||
|
||||
public D4ScienceFileGenerator (){
|
||||
super();
|
||||
supportedIDataTypes.add(D4ScienceFileDataBinding.class);
|
||||
}
|
||||
|
||||
public InputStream generateStream(IData data, String mimeType, String schema) throws IOException {
|
||||
|
||||
// InputStream theStream = new ByteArrayInputStream(((D4ScienceFileDataBinding)data).getPayload().getBytes());
|
||||
// InputStream theStream = new URL(((D4ScienceFileDataBinding)data).getPayload()).openStream();
|
||||
InputStream theStream = ((D4ScienceFileDataBinding)data).getPayload().getDataStream();
|
||||
|
||||
return theStream;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import java.io.InputStream;
|
||||
|
||||
import org.n52.wps.io.data.GenericFileData;
|
||||
import org.n52.wps.io.datahandler.parser.AbstractParser;
|
||||
import org.n52.wps.io.datahandler.parser.GenericFileParser;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class D4ScienceFileParser extends AbstractParser{
|
||||
|
||||
private static Logger LOGGER = LoggerFactory.getLogger(D4ScienceDataInputBinding.class);
|
||||
|
||||
public D4ScienceFileParser() {
|
||||
super();
|
||||
supportedIDataTypes.add(D4ScienceDataInputBinding.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public D4ScienceDataInputBinding parse(InputStream input, String mimeType, String schema) {
|
||||
|
||||
GenericFileData theData = new GenericFileData(input, mimeType);
|
||||
LOGGER.info("Found Gis File Input " + mimeType);
|
||||
|
||||
return new D4ScienceDataInputBinding(theData);
|
||||
}
|
||||
|
||||
}
|
|
@ -1,28 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import org.n52.wps.io.data.GenericFileData;
|
||||
import org.n52.wps.io.data.IComplexData;
|
||||
|
||||
public class GifFileDataBinding implements IComplexData {
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = 625383192227478620L;
|
||||
protected GenericFileData payload;
|
||||
|
||||
public GifFileDataBinding(GenericFileData fileData){
|
||||
this.payload = fileData;
|
||||
}
|
||||
|
||||
public GenericFileData getPayload() {
|
||||
return payload;
|
||||
}
|
||||
|
||||
public Class<String> getSupportedClass() {
|
||||
return String.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dispose(){
|
||||
}
|
||||
}
|
|
@ -1,27 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
|
||||
import org.fusesource.hawtbuf.ByteArrayInputStream;
|
||||
import org.n52.wps.io.data.IData;
|
||||
import org.n52.wps.io.datahandler.generator.GenericFileGenerator;
|
||||
|
||||
public class GifFileGenerator extends GenericFileGenerator {
|
||||
|
||||
public GifFileGenerator (){
|
||||
super();
|
||||
supportedIDataTypes.add(GifFileDataBinding.class);
|
||||
}
|
||||
|
||||
public InputStream generateStream(IData data, String mimeType, String schema) throws IOException {
|
||||
|
||||
// InputStream theStream = new ByteArrayInputStream(((GifFileDataBinding)data).getPayload().getBytes());
|
||||
// InputStream theStream = new URL(((GifFileDataBinding)data).getPayload()).openStream();
|
||||
InputStream theStream = ((GifFileDataBinding)data).getPayload().getDataStream();
|
||||
|
||||
return theStream;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,28 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import org.n52.wps.io.data.GenericFileData;
|
||||
import org.n52.wps.io.data.IComplexData;
|
||||
|
||||
public class GisLinkDataBinding implements IComplexData {
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = 625383192227478620L;
|
||||
protected GenericFileData payload;
|
||||
|
||||
public GisLinkDataBinding(GenericFileData fileData){
|
||||
this.payload = fileData;
|
||||
}
|
||||
|
||||
public GenericFileData getPayload() {
|
||||
return payload;
|
||||
}
|
||||
|
||||
public Class<String> getSupportedClass() {
|
||||
return String.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dispose(){
|
||||
}
|
||||
}
|
|
@ -1,28 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import org.n52.wps.io.data.GenericFileData;
|
||||
import org.n52.wps.io.data.IComplexData;
|
||||
|
||||
public class GisLinkDataInputBinding implements IComplexData {
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = 625383192227478620L;
|
||||
protected GenericFileData payload;
|
||||
|
||||
public GisLinkDataInputBinding(GenericFileData fileData){
|
||||
this.payload = fileData;
|
||||
}
|
||||
|
||||
public GenericFileData getPayload() {
|
||||
return payload;
|
||||
}
|
||||
|
||||
public Class<GenericFileData> getSupportedClass() {
|
||||
return GenericFileData.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dispose(){
|
||||
}
|
||||
}
|
|
@ -1,25 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
|
||||
import org.fusesource.hawtbuf.ByteArrayInputStream;
|
||||
import org.n52.wps.io.data.IData;
|
||||
import org.n52.wps.io.datahandler.generator.GenericFileGenerator;
|
||||
|
||||
public class GisLinkGenerator extends GenericFileGenerator {
|
||||
|
||||
public GisLinkGenerator (){
|
||||
super();
|
||||
supportedIDataTypes.add(GisLinkDataBinding.class);
|
||||
}
|
||||
|
||||
public InputStream generateStream(IData data, String mimeType, String schema) throws IOException {
|
||||
// InputStream theStream = new ByteArrayInputStream(((GisLinkDataBinding)data).getPayload().getBytes());
|
||||
// InputStream theStream = new URL(((GisLinkDataBinding)data).getPayload()).openStream();
|
||||
InputStream theStream = ((GisLinkDataBinding)data).getPayload().getDataStream();
|
||||
return theStream;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import java.io.InputStream;
|
||||
|
||||
import org.n52.wps.io.data.GenericFileData;
|
||||
import org.n52.wps.io.datahandler.parser.AbstractParser;
|
||||
import org.n52.wps.io.datahandler.parser.GenericFileParser;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class GisLinkParser extends AbstractParser{
|
||||
|
||||
private static Logger LOGGER = LoggerFactory.getLogger(GenericFileParser.class);
|
||||
//TODO manage gis link bindings
|
||||
public GisLinkParser() {
|
||||
super();
|
||||
supportedIDataTypes.add(GisLinkDataBinding.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public GisLinkDataInputBinding parse(InputStream input, String mimeType, String schema) {
|
||||
|
||||
GenericFileData theData = new GenericFileData(input, mimeType);
|
||||
LOGGER.info("Found Gis File Input " + mimeType);
|
||||
|
||||
return new GisLinkDataInputBinding(theData);
|
||||
}
|
||||
|
||||
}
|
|
@ -1,28 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import org.n52.wps.io.data.GenericFileData;
|
||||
import org.n52.wps.io.data.IComplexData;
|
||||
|
||||
public class PngFileDataBinding implements IComplexData {
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = 625383192227478620L;
|
||||
protected GenericFileData payload;
|
||||
|
||||
public PngFileDataBinding(GenericFileData fileData){
|
||||
this.payload = fileData;
|
||||
}
|
||||
|
||||
public GenericFileData getPayload() {
|
||||
return payload;
|
||||
}
|
||||
|
||||
public Class<String> getSupportedClass() {
|
||||
return String.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dispose(){
|
||||
}
|
||||
}
|
|
@ -1,27 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
|
||||
import org.fusesource.hawtbuf.ByteArrayInputStream;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.n52.wps.io.data.IData;
|
||||
import org.n52.wps.io.datahandler.generator.GenericFileGenerator;
|
||||
|
||||
public class PngFileGenerator extends GenericFileGenerator {
|
||||
|
||||
public PngFileGenerator (){
|
||||
super();
|
||||
supportedIDataTypes.add(PngFileDataBinding.class);
|
||||
}
|
||||
|
||||
public InputStream generateStream(IData data, String mimeType, String schema) throws IOException {
|
||||
// InputStream theStream = new ByteArrayInputStream(((PngFileDataBinding)data).getPayload().getBytes());
|
||||
// InputStream theStream = new URL(((PngFileDataBinding)data).getPayload()).openStream();
|
||||
InputStream theStream = ((PngFileDataBinding)data).getPayload().getDataStream();
|
||||
|
||||
return theStream;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,132 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.capabilities;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.net.InetAddress;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.ProcessorsFactory;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.InfrastructureDialoguer;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.ConfigurationManager;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.TokenManager;
|
||||
import org.n52.wps.commons.WPSConfig;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class GetCapabilitiesBuilder {
|
||||
|
||||
public static String processString = "<wps:Process wps:processVersion=\"1.1.0\">\n\t<ows:Identifier>#CLASS#</ows:Identifier>\n\t<ows:Title>#TITLE#</ows:Title>\n</wps:Process>";
|
||||
|
||||
public String getClassification(String algorithmName, ConfigurationManager configManager) throws Exception{
|
||||
//get algorithms classification:
|
||||
AnalysisLogger.getLogger().debug("Searching for a classification of "+algorithmName);
|
||||
HashMap<String, List<String>> algorithmsClassification = ProcessorsFactory.getAllFeaturesUser(configManager.getConfig());
|
||||
String rightClassification = "Others";
|
||||
for (String classification:algorithmsClassification.keySet()){
|
||||
List<String> algorithms = algorithmsClassification.get(classification);
|
||||
if (algorithms.contains(algorithmName)){
|
||||
AnalysisLogger.getLogger().debug("Found classification"+classification);
|
||||
return classification;
|
||||
}
|
||||
}
|
||||
AnalysisLogger.getLogger().debug("No classification found for "+algorithmName);
|
||||
return rightClassification;
|
||||
}
|
||||
|
||||
public String buildGetCapabilities(Map<String, String[]> parameters) throws Exception {
|
||||
|
||||
ch.qos.logback.classic.Logger root = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME);
|
||||
root.setLevel(ch.qos.logback.classic.Level.OFF);
|
||||
|
||||
LinkedHashMap<String, Object> basicInputs = new LinkedHashMap<String, Object>();
|
||||
//DONE get scope and username from SmartGears to build the get capabilities
|
||||
/* OLD CODE
|
||||
if (parameters != null) {
|
||||
if (parameters.get(ConfigurationManager.scopeParameter) != null)
|
||||
basicInputs.put(ConfigurationManager.scopeParameter, parameters.get(ConfigurationManager.scopeParameter)[0]);
|
||||
if (parameters.get(ConfigurationManager.usernameParameter) != null)
|
||||
basicInputs.put(ConfigurationManager.usernameParameter, parameters.get(ConfigurationManager.usernameParameter)[0]);
|
||||
} else {// case for testing purposes only
|
||||
if (AbstractEcologicalEngineMapper.simulationMode){
|
||||
basicInputs.put(ConfigurationManager.scopeParameter, ConfigurationManager.defaultScope);
|
||||
basicInputs.put(ConfigurationManager.usernameParameter, ConfigurationManager.defaultUsername);
|
||||
}
|
||||
}
|
||||
*/
|
||||
ConfigurationManager configManager = new ConfigurationManager();
|
||||
TokenManager tokenm = new TokenManager();
|
||||
tokenm.getCredentials();
|
||||
String scope = tokenm.getScope();
|
||||
String username = tokenm.getUserName();
|
||||
basicInputs.put(ConfigurationManager.scopeParameter, scope);
|
||||
basicInputs.put(ConfigurationManager.usernameParameter, username);
|
||||
|
||||
configManager.configAlgorithmEnvironment(basicInputs);
|
||||
AnalysisLogger.getLogger().debug("Initializing Capabilities Skeleton in scope " + configManager.getScope() + " with user " + configManager.getUsername());
|
||||
InputStream is = this.getClass().getClassLoader().getResourceAsStream("templates/wpsCapabilitiesSkeleton.xml");
|
||||
String stringTemplate = IOUtils.toString(is, "UTF-8");
|
||||
String host = InetAddress.getLocalHost().getCanonicalHostName();
|
||||
String port = WPSConfig.getInstance().getWPSConfig().getServer().getHostport();
|
||||
stringTemplate = stringTemplate.replace("#HOST#", host).replace("#PORT#", port);
|
||||
|
||||
AnalysisLogger.getLogger().debug("Host: " + host + " Port: " + port);
|
||||
|
||||
String packageS = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses";
|
||||
|
||||
LinkedHashMap<String, String> allalgorithms = new LinkedHashMap<String, String>();
|
||||
List<Class<?>> classes = null;
|
||||
try{
|
||||
AnalysisLogger.getLogger().debug("Taking classes from /classes");
|
||||
classes = GetCapabilitiesChecker.find(packageS);
|
||||
}catch(Exception e){
|
||||
AnalysisLogger.getLogger().debug("Taking classes from the Jar");
|
||||
classes=GetCapabilitiesChecker.getClassesInSamePackageFromJar(packageS);
|
||||
}
|
||||
for (Class<?> classfind : classes) {
|
||||
org.n52.wps.algorithm.annotation.Algorithm algorithmInfo = classfind.getAnnotation(org.n52.wps.algorithm.annotation.Algorithm.class);
|
||||
if (algorithmInfo != null) {
|
||||
AnalysisLogger.getLogger().debug("Retrieving local declared Algorithm: " + algorithmInfo.title());
|
||||
allalgorithms.put(algorithmInfo.title(), classfind.getName());
|
||||
}
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug("Getting algorithms from the infrastructure");
|
||||
InfrastructureDialoguer dialoguer = new InfrastructureDialoguer(configManager.getScope());
|
||||
List<String> algorithmsInScope = dialoguer.getAlgorithmsInScope();
|
||||
AnalysisLogger.getLogger().debug("Found " + algorithmsInScope.size() + " algorithms in scope ");
|
||||
StringBuffer capabilities = new StringBuffer();
|
||||
|
||||
for (String algorithmInScope : algorithmsInScope) {
|
||||
String classAlgorithm = allalgorithms.get(algorithmInScope);
|
||||
if (classAlgorithm != null) {
|
||||
AnalysisLogger.getLogger().debug("Approving " + classAlgorithm + " to capabilities ");
|
||||
String algorithmTitle = getClassification(algorithmInScope, configManager)+":"+algorithmInScope;
|
||||
// String algorithmTitle = algorithmInScope;
|
||||
capabilities.append(processString.replace("#TITLE#", algorithmTitle).replace("#CLASS#", classAlgorithm));
|
||||
}
|
||||
}
|
||||
|
||||
stringTemplate = stringTemplate.replace("#PROCESSES#", capabilities.toString());
|
||||
AnalysisLogger.getLogger().debug("Get capabilities built");
|
||||
// System.out.println("Template:\n"+stringTemplate);
|
||||
return stringTemplate;
|
||||
}
|
||||
|
||||
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
GetCapabilitiesBuilder builder = new GetCapabilitiesBuilder();
|
||||
builder.buildGetCapabilities(null);
|
||||
// String packageS = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses";
|
||||
|
||||
// URL scannedUrl =
|
||||
// Thread.currentThread().getContextClassLoader().getResource(packageS);
|
||||
// System.out.println(scannedUrl);
|
||||
|
||||
// GetCapabilitiesChecker.getClassesInSamePackageFromJar(packageS);
|
||||
}
|
||||
|
||||
}
|
|
@ -1,341 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.capabilities;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.Reader;
|
||||
import java.net.URL;
|
||||
import java.net.URLConnection;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Enumeration;
|
||||
import java.util.List;
|
||||
import java.util.jar.JarEntry;
|
||||
import java.util.jar.JarFile;
|
||||
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.impl.client.DefaultHttpClient;
|
||||
import org.apache.http.params.HttpConnectionParams;
|
||||
import org.apache.http.params.HttpParams;
|
||||
import org.apache.http.util.EntityUtils;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
 * Utility class used when building/checking the WPS capabilities document:
 * scans a package for algorithm classes (either from an exploded classes
 * directory or from within a jar) and offers several helpers to fetch HTTP
 * pages, used by the {@code main}/{@code main1} checks below.
 */
public class GetCapabilitiesChecker {

	private static final char DOT = '.';

	private static final char SLASH = '/';

	private static final String CLASS_SUFFIX = ".class";

	private static final String BAD_PACKAGE_ERROR = "Unable to get resources from path '%s'. Are you sure the package '%s' exists?";

	/**
	 * Lists all classes of the given package by opening the jar that contains it
	 * and scanning its entries. Used as a fallback when the package is not
	 * available as a plain directory on disk.
	 *
	 * @param packageName dotted package name to scan
	 * @return the loadable classes found under that package inside the jar
	 * @throws Exception if the jar cannot be located or opened
	 */
	public static List<Class<?>> getClassesInSamePackageFromJar(String packageName) throws Exception {

		String scannedPath = packageName.replace(".", "/");
		URL scannedUrl = Thread.currentThread().getContextClassLoader().getResource(scannedPath);
		String jarPath = scannedUrl.getFile();
		AnalysisLogger.getLogger().debug("Jar Path complete: " + jarPath);
		// strip the "file:/" prefix and the "!/package/path" suffix of the jar URL
		jarPath = jarPath.substring(jarPath.indexOf("file:/") + 6, jarPath.lastIndexOf("!"));
		// NOTE(review): re-adds the leading slash removed by the substring above,
		// but only for paths under /home — paths rooted elsewhere would stay
		// relative; confirm intended deployment layout
		if (jarPath.startsWith("home"))
			jarPath = "/" + jarPath;
		AnalysisLogger.getLogger().debug("Jar Path: " + jarPath);

		JarFile jarFile = null;
		List<Class<?>> result = new ArrayList<Class<?>>();

		try {
			jarFile = new JarFile(jarPath);
			Enumeration<JarEntry> en = jarFile.entries();

			while (en.hasMoreElements()) {
				JarEntry entry = en.nextElement();
				String entryName = entry.getName();
				// jar entries use '/' separators; normalize the package name to match
				packageName = packageName.replace('.', '/');

				if (entryName != null && entryName.endsWith(".class") && entryName.startsWith(packageName)) {
					try {
						// drop the 6-char ".class" suffix and convert path back to a dotted name
						Class entryClass = Class.forName(entryName.substring(0, entryName.length() - 6).replace('/', '.'));

						if (entryClass != null) {
							result.add(entryClass);
						}
					} catch (Throwable e) {
						// do nothing, just continue processing classes
					}
				}
			}

			return result;
		} catch (Exception e) {
			throw e;
		} finally {
			// always release the jar handle; close failures are deliberately ignored
			try {
				if (jarFile != null) {
					jarFile.close();
				}

			} catch (Exception e) {
			}
		}
	}

	/**
	 * Finds all classes of the given package by scanning it as a directory on
	 * the classpath (exploded /classes layout). Throws IllegalArgumentException
	 * when the package is not resolvable as a resource — callers use that to
	 * fall back to {@link #getClassesInSamePackageFromJar(String)}.
	 *
	 * @param scannedPackage dotted package name to scan
	 * @return all loadable classes found recursively under the package directory
	 */
	public static List<Class<?>> find(String scannedPackage) {
		String scannedPath = scannedPackage.replace(DOT, SLASH);
		URL scannedUrl = Thread.currentThread().getContextClassLoader().getResource(scannedPath);
		if (scannedUrl == null) {
			throw new IllegalArgumentException(String.format(BAD_PACKAGE_ERROR, scannedPath, scannedPackage));
		}
		File scannedDir = new File(scannedUrl.getFile());
		System.out.println("scannedDir:" + scannedDir);
		System.out.println("scannedUrl:" + scannedUrl);
		System.out.println("scannedUrl List:" + scannedDir.listFiles());
		List<Class<?>> classes = new ArrayList<Class<?>>();
		// NOTE(review): listFiles() returns null when the URL points inside a jar;
		// the resulting NPE is what triggers the jar fallback in callers — confirm
		for (File file : scannedDir.listFiles()) {
			classes.addAll(find(file, scannedPackage));
		}
		return classes;
	}

	/**
	 * Recursive helper of {@link #find(String)}: walks a file/directory and
	 * collects the classes it denotes, skipping the five marker interfaces
	 * (IClusterer, IEvaluator, IGenerator, IModeller, ITransducer).
	 */
	private static List<Class<?>> find(File file, String scannedPackage) {
		List<Class<?>> classes = new ArrayList<Class<?>>();
		String resource = scannedPackage + DOT + file.getName();
		if (file.isDirectory()) {
			for (File child : file.listFiles()) {
				classes.addAll(find(child, resource));
			}
		} else if (resource.endsWith(CLASS_SUFFIX)) {
			int endIndex = resource.length() - CLASS_SUFFIX.length();
			String className = resource.substring(0, endIndex);
			try {
				// marker interfaces are filtered out by name, not loaded
				if (!(className.contains("IClusterer") || className.contains("IEvaluator") || className.contains("IGenerator") || className.contains("IModeller") || className.contains("ITransducer")))
					classes.add(Class.forName(className));
			} catch (ClassNotFoundException ignore) {
			}
		}
		return classes;
	}

	/**
	 * Fetches the given URL with Apache HttpClient (7-minute timeouts) and
	 * returns the response body as a string, using the platform default charset.
	 *
	 * @param url the page to fetch
	 * @return the response body
	 * @throws Exception on connection or read failure
	 */
	public static String readPage(URL url) throws Exception {

		DefaultHttpClient httpClient = new DefaultHttpClient();
		HttpParams params = httpClient.getParams();
		HttpConnectionParams.setConnectionTimeout(params, 7 * 60000);
		HttpConnectionParams.setSoTimeout(params, 7 * 60000);
		HttpConnectionParams.setStaleCheckingEnabled(params, false);
		HttpConnectionParams.setSoKeepalive(params, false);

		HttpGet request = new HttpGet(url.toURI());
		HttpResponse response = httpClient.execute(request);
		System.out.println("URL executed!");
		Reader reader = null;
		try {
			reader = new InputStreamReader(response.getEntity().getContent());
			System.out.println("Read input stream!");
			StringBuffer sb = new StringBuffer();
			{
				int read;
				char[] cbuf = new char[1024];
				while ((read = reader.read(cbuf)) != -1)
					sb.append(cbuf, 0, read);
			}

			// NOTE(review): entity consumption and connection-manager shutdown only
			// happen on the success path; an exception while reading leaks the
			// client's connections — confirm acceptable for this test utility
			EntityUtils.consume(response.getEntity());
			httpClient.getConnectionManager().shutdown();

			return sb.toString();

		} finally {

			if (reader != null) {
				try {
					reader.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
	}

	/**
	 * Determines the charset advertised by the connection's Content-Type header,
	 * falling back to the platform default charset when absent or unknown.
	 *
	 * @param connection an opened connection; its headers are inspected
	 * @return the declared charset, or {@code Charset.defaultCharset()}
	 */
	public static Charset getConnectionCharset(URLConnection connection) {
		String contentType = null;
		try {
			contentType = connection.getContentType();
		}
		catch (Exception e) {
			// specified charset is not found,
			// skip it to return the default one
			return Charset.defaultCharset();
		}
		if (contentType != null && contentType.length() > 0)
		{
			contentType = contentType.toLowerCase();
			String charsetName = extractCharsetName(contentType);
			if (charsetName != null && charsetName.length() > 0)
			{
				try
				{
					return Charset.forName(charsetName);
				}
				catch (Exception e) {
					// specified charset is not found,
					// skip it to return the default one
				}
			}
		}

		// return the default charset
		return Charset.defaultCharset();
	}
	/**
	 * Extract the charset name form the content type string.
	 * Content type string is received from Content-Type header.
	 *
	 * @param contentType the content type string, must be not null.
	 * @return the found charset name or null if not found.
	 */
	private static String extractCharsetName(String contentType) {
		// split onto media types
		// NOTE(review): splits on ':' although Content-Type values separate media
		// types with ',' — works only because index 0 is used anyway; confirm
		final String[] mediaTypes = contentType.split(":");
		if (mediaTypes.length > 0)
		{
			// use only the first one, and split it on parameters
			final String[] params = mediaTypes[0].split(";");

			// find the charset parameter and return it's value
			for (String each : params)
			{
				each = each.trim();
				if (each.startsWith("charset="))
				{
					// return the charset name
					return each.substring(8).trim();
				}
			}
		}

		return null;
	}

	// NOTE(review): these two constants appear unused in this class — confirm
	// whether other code references them before removing
	private static String RUNTIME_RESOURCE_NAME = "ReportsStoreGateway";
	private static String CATEGORY_NAME = "Service";

	/**
	 * Fetches a page through plain URLConnection (no HttpClient), spoofing a
	 * browser User-Agent, with 25-minute timeouts; lines are re-joined with CRLF.
	 *
	 * @param url the page to fetch
	 * @return the page body (also echoed to stdout)
	 * @throws Exception on connection or read failure
	 */
	public static String readPageNoHttpClient(URL url) throws Exception{
		URLConnection conn = url.openConnection();
		//pretend you're a browser (make my request from Java more browsery-like.)
		conn.addRequestProperty("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.95 Safari/537.11");
		conn.setDoOutput(true);
		conn.setAllowUserInteraction(true);
		conn.setConnectTimeout(25*60000);
		conn.setReadTimeout(25*60000);

		Charset charset = getConnectionCharset(conn);

		BufferedReader dis = new BufferedReader(new InputStreamReader(conn.getInputStream(), charset));
		String inputLine;
		StringBuffer pageBuffer = new StringBuffer();

		// Loop through each line, looking for the closing head element
		while ((inputLine = dis.readLine()) != null) {
			pageBuffer.append(inputLine + "\r\n");
		}

		String page = pageBuffer.toString();
		System.out.println(page);
		conn.getInputStream().close();
		return page;
	}

	/**
	 * Same as {@link #readPageNoHttpClient(URL)} but additionally sends the
	 * given gCube authorization token in the "gcube-token" request header.
	 *
	 * @param url   the page to fetch
	 * @param token the gcube-token header value
	 * @return the page body (also echoed to stdout)
	 * @throws Exception on connection or read failure
	 */
	public static String readPageHTTPHeader(URL url,String token) throws Exception{
		URLConnection conn = url.openConnection();
		//pretend you're a browser (make my request from Java more browsery-like.)
		conn.addRequestProperty("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.95 Safari/537.11");
		conn.setDoOutput(true);
		conn.setAllowUserInteraction(true);
		conn.setConnectTimeout(25*60000);
		conn.setReadTimeout(25*60000);
		conn.setRequestProperty("gcube-token", token);

		Charset charset = getConnectionCharset(conn);

		BufferedReader dis = new BufferedReader(new InputStreamReader(conn.getInputStream(), charset));
		String inputLine;
		StringBuffer pageBuffer = new StringBuffer();

		// Loop through each line, looking for the closing head element
		while ((inputLine = dis.readLine()) != null) {
			pageBuffer.append(inputLine + "\r\n");
		}

		String page = pageBuffer.toString();
		System.out.println(page);
		conn.getInputStream().close();
		return page;
	}

	//build config.xml
	/** Prints the scanned algorithm classes as config.xml Property entries. */
	public static void main(String[] args) throws Exception {
		String packageS = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses";
		List<Class<?>> classes = GetCapabilitiesChecker.find(packageS);

		System.out.println(classes + "\n");

		for (Class<?> classfind : classes) {
			System.out.println("<Property name=\"Algorithm\" active=\"true\">" + classfind.getName() + "</Property>");
		}
		// System.exit(0);
		System.out.println("\n");
		System.out.println(classes.size() + " algorithms");

	}

	/**
	 * Alternative check: prints DescribeProcess URLs for every scanned class and
	 * fetches each one against a remote deployment, reporting bodies that
	 * contain exception markers.
	 */
	public static void main1(String[] args) throws Exception {
		String packageS = "org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses";
		List<Class<?>> classes = GetCapabilitiesChecker.find(packageS);

		System.out.println(classes + "\n");

		for (Class<?> classfind : classes) {
			System.out.println("<Property name=\"Algorithm\" active=\"true\">" + classfind.getName() + "</Property>");
		}
		// System.exit(0);
		System.out.println("\n");

		for (Class<?> classfind : classes) {
			System.out.println("http://localhost:8080/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&Identifier=" + classfind.getName() + "\n");
		}

		System.out.println("\n");
		System.out.println("Checking errors in Processes descriptions");
		// silence logback so only the check results are printed
		ch.qos.logback.classic.Logger root = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME);
		root.setLevel(ch.qos.logback.classic.Level.OFF);
		int counter = 0;
		for (Class<?> classfind : classes) {
			String httplink = "http://statistical-manager-new.d4science.org:8080/wps/WebProcessingService?Request=DescribeProcess&Service=WPS&Version=1.0.0&Identifier=" + classfind.getName();
			if (!httplink.contains("IClusterer") && !httplink.contains("IEvaluator") && !httplink.contains("IGenerator") && !httplink.contains("IModeller") && !httplink.contains("ITransducer")) {
				String pageCheck = readPage(new URL(httplink));
				counter++;
				if (pageCheck.contains("ows:ExceptionText") || pageCheck.contains("Exception")) {
					System.out.println("Reading Link: " + httplink);
					System.out.println("ERROR:\n" + pageCheck);

				}
			}
		}

		System.out.println("Checked " + counter + " algorithms");

	}

}
|
|
@ -1,141 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.codegeneration;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.ProcessorsFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.StatisticalTypeToWPSType;
|
||||
|
||||
public class ClassGenerator {
|
||||
|
||||
public static String configPath = "./cfg/";
|
||||
public static String generationPath = "./src/main/java/org/gcube/dataanalysis/wps/statisticalmanager/synchserver/mappedclasses/";
|
||||
public StatisticalTypeToWPSType converter;
|
||||
|
||||
public ClassGenerator() throws Exception {
|
||||
converter = new StatisticalTypeToWPSType();
|
||||
}
|
||||
|
||||
public void generateEcologicalEngineClasses() throws Exception {
|
||||
AlgorithmConfiguration config = new AlgorithmConfiguration();
|
||||
config.setConfigPath(configPath);
|
||||
// set scope etc..
|
||||
HashMap<String, List<String>> algorithms = ProcessorsFactory.getAllFeatures(config);
|
||||
for (String algorithmSet : algorithms.keySet()) {
|
||||
List<String> parametersList = algorithms.get(algorithmSet);
|
||||
System.out.println(algorithmSet + ":" + parametersList.toString());
|
||||
|
||||
for (String algorithm : parametersList) {
|
||||
// got an algorithm
|
||||
System.out.println("Algorithm: " + algorithm);
|
||||
String description = ""; // get this information
|
||||
String name = ""; // get this information
|
||||
StringBuffer classWriter = new StringBuffer();
|
||||
List<StatisticalType> inputs = null;
|
||||
StatisticalType outputs = null;
|
||||
name = algorithm;
|
||||
// build class preamble
|
||||
config.setAgent(algorithm);
|
||||
config.setModel(algorithm);
|
||||
String packageString = "";
|
||||
String interfaceString = "";
|
||||
try{
|
||||
if (algorithmSet.equals("DISTRIBUTIONS")) {
|
||||
packageString = "generators";
|
||||
interfaceString = "IGenerator";
|
||||
inputs = GeneratorsFactory.getAlgorithmParameters(configPath, algorithm);
|
||||
description = GeneratorsFactory.getDescription(configPath, algorithm);
|
||||
outputs = GeneratorsFactory.getAlgorithmOutput(configPath, algorithm);
|
||||
} else if (algorithmSet.equals("TRANSDUCERS")) {
|
||||
packageString = "transducerers";
|
||||
interfaceString = "ITransducer";
|
||||
inputs = TransducerersFactory.getTransducerParameters(config, algorithm);
|
||||
description = TransducerersFactory.getDescription(config, algorithm);
|
||||
outputs = TransducerersFactory.getTransducerOutput(config, algorithm);
|
||||
} else if (algorithmSet.equals("MODELS")) {
|
||||
packageString = "modellers";
|
||||
interfaceString = "IModeller";
|
||||
inputs = ModelersFactory.getModelParameters(configPath, algorithm);
|
||||
description = ModelersFactory.getDescription(configPath, algorithm);
|
||||
outputs = ModelersFactory.getModelOutput(configPath, algorithm);
|
||||
} else if (algorithmSet.equals("CLUSTERERS")) {
|
||||
packageString = "clusterers";
|
||||
interfaceString = "IClusterer";
|
||||
inputs = ClusterersFactory.getClustererParameters(configPath, algorithm);
|
||||
description = ClusterersFactory.getDescription(configPath, algorithm);
|
||||
outputs = ClusterersFactory.getClustererOutput(configPath, algorithm);
|
||||
} else if (algorithmSet.equals("TEMPORAL_ANALYSIS")) {
|
||||
|
||||
} else if (algorithmSet.equals("EVALUATORS")) {
|
||||
packageString = "evaluators";
|
||||
interfaceString = "IEvaluator";
|
||||
inputs = EvaluatorsFactory.getEvaluatorParameters(configPath, algorithm);
|
||||
description = EvaluatorsFactory.getDescription(configPath, algorithm);
|
||||
outputs = EvaluatorsFactory.getEvaluatorOutput(configPath, algorithm);
|
||||
}
|
||||
}catch(Exception e){
|
||||
System.out.println("Error in retrieving output: "+e.getLocalizedMessage());
|
||||
}
|
||||
classWriter.append(((String) StatisticalTypeToWPSType.templates.get("package")).replace("#PACKAGE#", packageString) + "\n" + ((String) StatisticalTypeToWPSType.templates.get("import")) + "\n");
|
||||
System.out.println("Class preamble: \n" + classWriter.toString());
|
||||
|
||||
// build class description
|
||||
String classdescription = (String) StatisticalTypeToWPSType.templates.get("description");
|
||||
//modification of 20/07/15
|
||||
classdescription = classdescription.replace("#TITLE#", name).replace("#ABSTRACT#", description).replace("#CLASSNAME#", name).replace("#PACKAGE#", packageString);
|
||||
System.out.println("Class description : \n" + classdescription);
|
||||
String classdefinition = (String) StatisticalTypeToWPSType.templates.get("class_definition");
|
||||
classdefinition = classdefinition.replace("#CLASSNAME#", name).replace("#INTERFACE#", interfaceString);
|
||||
System.out.println("Class definition: \n" + classdefinition);
|
||||
classWriter.append(classdescription + "\n");
|
||||
classWriter.append(classdefinition + "\n");
|
||||
// attach scope input deprecated!
|
||||
// classWriter.append((String) StatisticalTypeToWPSType.templates.get("scopeInput") + "\n");
|
||||
// classWriter.append((String) StatisticalTypeToWPSType.templates.get("usernameInput") + "\n");
|
||||
for (StatisticalType input : inputs) {
|
||||
System.out.println(input);
|
||||
String wpsInput = converter.convert2WPSType(input, true, config);
|
||||
if (wpsInput != null) {
|
||||
classWriter.append(wpsInput + "\n");
|
||||
System.out.println("Input:\n" + wpsInput);
|
||||
}
|
||||
}
|
||||
if (outputs != null) {
|
||||
System.out.println("Alg. Output:\n" + outputs);
|
||||
String wpsOutput = converter.convert2WPSType(outputs, false, config);
|
||||
classWriter.append(wpsOutput + "\n");
|
||||
System.out.println("Output:\n" + wpsOutput);
|
||||
}
|
||||
else
|
||||
System.out.println("Output is empty!");
|
||||
// add potential outputs
|
||||
classWriter.append((String) StatisticalTypeToWPSType.templates.getProperty("optionalOutput") + "\n");
|
||||
classWriter.append((String) StatisticalTypeToWPSType.templates.get("class_closure"));
|
||||
|
||||
System.out.println("Class:\n" + classWriter.toString());
|
||||
System.out.println("Saving...");
|
||||
FileTools.saveString(generationPath + packageString+"/"+algorithm + ".java", classWriter.toString(), true, "UTF-8");
|
||||
// break;
|
||||
}
|
||||
// break;
|
||||
}
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
ClassGenerator generator = new ClassGenerator();
|
||||
generator.generateEcologicalEngineClasses();
|
||||
System.out.println("Finished!");
|
||||
|
||||
}
|
||||
}
|
|
@ -1,16 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure;
|
||||
|
||||
/**
 * Plain value holder for the connection coordinates of a database resolved
 * from the gCube Information System (see InfrastructureDialoguer).
 * Defaults target PostgreSQL.
 */
public class DatabaseInfo {

	public String username;
	public String password;
	public String url;
	public String driver = "org.postgresql.Driver";
	public String dialect = "org.hibernate.dialect.PostgreSQLDialect";
	public String tablespace = "";

	/**
	 * Human-readable summary. The password is masked: the previous version
	 * concatenated the plaintext password, and this string ends up in debug
	 * logs.
	 */
	public String toString() {
		return "DB Info: " + username + ":*** - " + url + " (" + driver + "," + dialect + "," + tablespace + ")";
	}
}
|
|
@ -1,85 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.common.encryption.StringEncrypter;
|
||||
import org.gcube.common.resources.gcore.GenericResource;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint;
|
||||
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryClient;
|
||||
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
|
||||
import static org.gcube.resources.discovery.icclient.ICFactory.*;
|
||||
|
||||
/**
 * Thin facade over the gCube Information System (IS) discovery client: given a
 * scope, resolves database ServiceEndpoints and the list of algorithms enabled
 * in that scope.
 */
public class InfrastructureDialoguer {
	// the gCube scope queried by all methods of this instance
	public String scope;

	/**
	 * Binds this dialoguer (and the process-wide ScopeProvider) to the scope.
	 *
	 * @param scope the gCube scope, e.g. "/gcube/devsec/devVRE"
	 */
	public InfrastructureDialoguer(String scope){
		// NOTE(review): sets the JVM-wide ambient scope as a side effect — any
		// other IS call in the same thread inherits it
		ScopeProvider.instance.set(scope);
		this.scope = scope;
	}

	/**
	 * Resolves the connection coordinates of the named database resource in the
	 * current scope. The first access point of the first matching
	 * ServiceEndpoint is used; the stored password is decrypted; a "driver"
	 * property, if present, overrides the default driver.
	 *
	 * @param resourceName the ServiceEndpoint profile name to look up
	 * @return the populated database info
	 * @throws Exception if no resource matches or it carries no URL
	 */
	public DatabaseInfo getDatabaseInfo(String resourceName) throws Exception{
		DatabaseInfo dbi = new DatabaseInfo();
		AnalysisLogger.getLogger().debug("Searching for Database "+resourceName+" in scope "+scope);
		SimpleQuery query = queryFor(ServiceEndpoint.class);
		// query.addCondition("$resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'StatisticalManagerDataBase' ");
		// query.addCondition("$resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq '"+resourceName+"' ");
		query.addCondition("$resource/Profile/Name eq '"+resourceName+"' ");
		DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);
		List<ServiceEndpoint> resources = client.submit(query);
		if (resources==null || resources.size()==0){
			throw new Exception("No resource named "+resourceName+" available in scope "+scope);
		}
		else{
			// only the first access point of the first resource is considered
			AccessPoint ap = resources.get(0).profile().accessPoints().iterator().next();
			dbi.url = ap.address();
			dbi.username = ap.username();
			// passwords are stored encrypted in the IS
			dbi.password = StringEncrypter.getEncrypter().decrypt(ap.password().trim());

			for (ServiceEndpoint.Property property:ap.properties()){
				if (property.name().equalsIgnoreCase("driver"))
					dbi.driver = property.value();
			}

			AnalysisLogger.getLogger().debug("Found Database : "+dbi);
		}

		if (dbi.url == null)
			throw new Exception("No database URL for resource "+resourceName+" available in scope "+scope);
		return dbi;

	}


	/**
	 * Lists the names of all algorithms enabled in the current scope, i.e. the
	 * GenericResources with SecondaryType 'StatisticalManagerAlgorithm'.
	 *
	 * @return the resource (algorithm) names; never empty
	 * @throws Exception if the scope declares no such resources
	 */
	public List<String> getAlgorithmsInScope() throws Exception{
		AnalysisLogger.getLogger().debug("Searching for Algorithms in scope "+scope);
		SimpleQuery query = queryFor(GenericResource.class);
		query.addCondition("$resource/Profile/SecondaryType eq 'StatisticalManagerAlgorithm' ");
		DiscoveryClient<GenericResource> client = clientFor(GenericResource.class);
		List<GenericResource> resources = client.submit(query);
		if (resources==null || resources.size()==0){
			throw new Exception("No resource named StatisticalManagerAlgorithm available in scope "+scope);
		}
		List<String> resourcesNames = new ArrayList<String>();
		AnalysisLogger.getLogger().debug("Found "+resources.size()+" resources");
		for (GenericResource resource: resources){
			resourcesNames.add(resource.profile().name());
		}
		return resourcesNames;

	}

	/** Manual test: resolves the "FishBase" database in the devVRE scope. */
	public static void main(String[] args) throws Exception{
		AnalysisLogger.setLogger("cfg/"
				+ AlgorithmConfiguration.defaultLoggerFile);
		InfrastructureDialoguer dialoguer = new InfrastructureDialoguer("/gcube/devsec/devVRE");
		// dialoguer.getDatabaseInfo("StatisticalManagerDataBase");
		dialoguer.getDatabaseInfo("FishBase");

	}

}
|
|
@ -1,18 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure;
|
||||
|
||||
/**
 * Helper to distinguish reserved system tables (AquaMaps reference tables such
 * as hcaf_d, hspen, occurrencecells) from user tables.
 */
public class TableCoherenceChecker {

	/**
	 * Checks, case-insensitively, whether the given name is one of the reserved
	 * system tables.
	 *
	 * @param tablename the table name to test; null is treated as "not a system table"
	 *                  (the previous version threw a NullPointerException on null)
	 * @return true if the name matches a reserved system table
	 */
	public static boolean isSystemTable(String tablename) {
		if (tablename == null)
			return false;
		return tablename.equalsIgnoreCase("hcaf_d")
				|| tablename.equalsIgnoreCase("occurrencecells")
				|| tablename.equalsIgnoreCase("hspen")
				|| tablename.equalsIgnoreCase("hspen_mini")
				|| tablename.equalsIgnoreCase("hspen_mini_100")
				|| tablename.equalsIgnoreCase("hspen_mini_10")
				|| tablename.equalsIgnoreCase("hcaf_d_2050");
	}

}
|
|
@ -1,5 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses;
|
||||
|
||||
public interface IClusterer {
|
||||
|
||||
}
|
|
@ -1,5 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses;
|
||||
|
||||
public interface IEvaluator {
|
||||
|
||||
}
|
|
@ -1,5 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses;
|
||||
|
||||
public interface IGenerator {
|
||||
|
||||
}
|
|
@ -1,5 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses;
|
||||
|
||||
public interface IModeller {
|
||||
|
||||
}
|
|
@ -1,5 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses;
|
||||
|
||||
public interface ITransducer {
|
||||
|
||||
}
|
|
@ -1,26 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="DBSCAN", abstrakt="A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN", version = "1.1.0")
|
||||
public class DBSCAN extends AbstractEcologicalEngineMapper implements IClusterer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "OccurrencePointsTable", binding = GenericFileDataBinding.class) public void setOccurrencePointsTable(GenericFileData file) {inputs.put("OccurrencePointsTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ]", defaultValue="", title="column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ]", identifier = "FeaturesColumnNames", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setFeaturesColumnNames(String data) {inputs.put("FeaturesColumnNames",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution", defaultValue="OccCluster_", title="table name of the resulting distribution", identifier = "OccurrencePointsClusterLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOccurrencePointsClusterLabel(String data) {inputs.put("OccurrencePointsClusterLabel",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: epsilon. DBScan epsilon parameter", defaultValue="10", title="DBScan epsilon parameter", identifier = "epsilon", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setepsilon(Integer data) {inputs.put("epsilon",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers)", defaultValue="1", title="DBScan minimum points parameter (identifies outliers)", identifier = "min_points", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setmin_points(Integer data) {inputs.put("min_points",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", title="Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,28 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="KMEANS", abstrakt="A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS", version = "1.1.0")
|
||||
public class KMEANS extends AbstractEcologicalEngineMapper implements IClusterer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "OccurrencePointsTable", binding = GenericFileDataBinding.class) public void setOccurrencePointsTable(GenericFileData file) {inputs.put("OccurrencePointsTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ]", defaultValue="", title="column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ]", identifier = "FeaturesColumnNames", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setFeaturesColumnNames(String data) {inputs.put("FeaturesColumnNames",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution", defaultValue="OccCluster_", title="table name of the resulting distribution", identifier = "OccurrencePointsClusterLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOccurrencePointsClusterLabel(String data) {inputs.put("OccurrencePointsClusterLabel",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: k. expected Number of Clusters", defaultValue="3", title="expected Number of Clusters", identifier = "k", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setk(Integer data) {inputs.put("k",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: max_runs. max runs of the clustering procedure", defaultValue="10", title="max runs of the clustering procedure", identifier = "max_runs", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setmax_runs(Integer data) {inputs.put("max_runs",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: max_optimization_steps. max number of internal optimization steps", defaultValue="5", title="max number of internal optimization steps", identifier = "max_optimization_steps", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setmax_optimization_steps(Integer data) {inputs.put("max_optimization_steps",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: min_points. number of points which define an outlier set", defaultValue="2", title="number of points which define an outlier set", identifier = "min_points", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setmin_points(Integer data) {inputs.put("min_points",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", title="Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,28 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="LOF", abstrakt="Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF", version = "1.1.0")
|
||||
public class LOF extends AbstractEcologicalEngineMapper implements IClusterer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "PointsTable", binding = GenericFileDataBinding.class) public void setPointsTable(GenericFileData file) {inputs.put("PointsTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ]", defaultValue="", title="column Names for the features [a sequence of names of columns from PointsTable separated by | ]", identifier = "FeaturesColumnNames", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setFeaturesColumnNames(String data) {inputs.put("FeaturesColumnNames",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: PointsClusterLabel. table name of the resulting distribution", defaultValue="Cluster_", title="table name of the resulting distribution", identifier = "PointsClusterLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setPointsClusterLabel(String data) {inputs.put("PointsClusterLabel",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors", defaultValue="2", title="locality (usually called k): minimal number of nearest neighbors", identifier = "minimal_points_lower_bound", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setminimal_points_lower_bound(Integer data) {inputs.put("minimal_points_lower_bound",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation", defaultValue="10", title="maximum number of nearest neighbors to take into account for outliers evaluation", identifier = "minimal_points_upper_bound", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setminimal_points_upper_bound(Integer data) {inputs.put("minimal_points_upper_bound",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: distance_function. the distance function to use in the calculation", allowedValues= {"euclidian distance","squared distance","cosine distance","inverted cosine distance","angle"}, defaultValue="euclidian distance", title="the distance function to use in the calculation", identifier = "distance_function", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setdistance_function(String data) {inputs.put("distance_function",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2)", defaultValue="2", title="the LOF score threshold over which the point is an outlier (usually 2)", identifier = "lof_threshold", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setlof_threshold(Integer data) {inputs.put("lof_threshold",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", title="Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,28 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="XMEANS", abstrakt="A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS", version = "1.1.0")
|
||||
public class XMEANS extends AbstractEcologicalEngineMapper implements IClusterer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "OccurrencePointsTable", binding = GenericFileDataBinding.class) public void setOccurrencePointsTable(GenericFileData file) {inputs.put("OccurrencePointsTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ]", defaultValue="", title="column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ]", identifier = "FeaturesColumnNames", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setFeaturesColumnNames(String data) {inputs.put("FeaturesColumnNames",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution", defaultValue="OccCluster_", title="table name of the resulting distribution", identifier = "OccurrencePointsClusterLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOccurrencePointsClusterLabel(String data) {inputs.put("OccurrencePointsClusterLabel",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning", defaultValue="10", title="XMeans max number of overall iterations of the clustering learning", identifier = "maxIterations", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setmaxIterations(Integer data) {inputs.put("maxIterations",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: minClusters. minimum number of expected clusters", defaultValue="1", title="minimum number of expected clusters", identifier = "minClusters", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setminClusters(Integer data) {inputs.put("minClusters",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: maxClusters. maximum number of clusters to produce", defaultValue="50", title="maximum number of clusters to produce", identifier = "maxClusters", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setmaxClusters(Integer data) {inputs.put("maxClusters",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: min_points. number of points which define an outlier set", defaultValue="2", title="number of points which define an outlier set", identifier = "min_points", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setmin_points(Integer data) {inputs.put("min_points",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", title="Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,30 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="DISCREPANCY_ANALYSIS", abstrakt="An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS", version = "1.1.0")
|
||||
public class DISCREPANCY_ANALYSIS extends AbstractEcologicalEngineMapper implements IEvaluator{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: FirstTable. First Table [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", title="First Table [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", maxOccurs=1, minOccurs=1, identifier = "FirstTable", binding = GenericFileDataBinding.class) public void setFirstTable(GenericFileData file) {inputs.put("FirstTable",file);}
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: SecondTable. Second Table [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", title="Second Table [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", maxOccurs=1, minOccurs=1, identifier = "SecondTable", binding = GenericFileDataBinding.class) public void setSecondTable(GenericFileData file) {inputs.put("SecondTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: FirstTableCsquareColumn. the csquares column name in the first table [the name of a column from FirstTable]", defaultValue="csquarecode", title="the csquares column name in the first table [the name of a column from FirstTable]", identifier = "FirstTableCsquareColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setFirstTableCsquareColumn(String data) {inputs.put("FirstTableCsquareColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: SecondTableCsquareColumn. the csquares column name in the second table [the name of a column from SecondTable]", defaultValue="csquarecode", title="the csquares column name in the second table [the name of a column from SecondTable]", identifier = "SecondTableCsquareColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSecondTableCsquareColumn(String data) {inputs.put("SecondTableCsquareColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: FirstTableProbabilityColumn. the probability column in the first table [the name of a column from FirstTable]", defaultValue="probability", title="the probability column in the first table [the name of a column from FirstTable]", identifier = "FirstTableProbabilityColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setFirstTableProbabilityColumn(String data) {inputs.put("FirstTableProbabilityColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: SecondTableProbabilityColumn. the probability column in the second table [the name of a column from SecondTable]", defaultValue="probability", title="the probability column in the second table [the name of a column from SecondTable]", identifier = "SecondTableProbabilityColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSecondTableProbabilityColumn(String data) {inputs.put("SecondTableProbabilityColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: ComparisonThreshold. the comparison threshold", defaultValue="0.1", title="the comparison threshold", identifier = "ComparisonThreshold", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setComparisonThreshold(Double data) {inputs.put("ComparisonThreshold",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: MaxSamples. the comparison threshold", defaultValue="10000", title="the comparison threshold", identifier = "MaxSamples", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxSamples(Integer data) {inputs.put("MaxSamples",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: KThreshold. Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5", defaultValue="0.5", title="Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5", identifier = "KThreshold", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setKThreshold(Double data) {inputs.put("KThreshold",""+data);}
|
||||
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,26 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="HRS", abstrakt="An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS", version = "1.1.0")
|
||||
public class HRS extends AbstractEcologicalEngineMapper implements IEvaluator{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: ProjectingAreaTable. A Table containing projecting area information [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", title="A Table containing projecting area information [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", maxOccurs=1, minOccurs=1, identifier = "ProjectingAreaTable", binding = GenericFileDataBinding.class) public void setProjectingAreaTable(GenericFileData file) {inputs.put("ProjectingAreaTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OptionalCondition. optional filter for taking area rows", defaultValue="where oceanarea>0", title="optional filter for taking area rows", identifier = "OptionalCondition", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOptionalCondition(String data) {inputs.put("OptionalCondition",data);}
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: PositiveCasesTable. A Table containing positive cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", title="A Table containing positive cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", maxOccurs=1, minOccurs=1, identifier = "PositiveCasesTable", binding = GenericFileDataBinding.class) public void setPositiveCasesTable(GenericFileData file) {inputs.put("PositiveCasesTable",file);}
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: NegativeCasesTable. A Table containing negative cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", title="A Table containing negative cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", maxOccurs=1, minOccurs=1, identifier = "NegativeCasesTable", binding = GenericFileDataBinding.class) public void setNegativeCasesTable(GenericFileData file) {inputs.put("NegativeCasesTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: FeaturesColumns. Features columns [a sequence of names of columns from PositiveCasesTable separated by | ]", defaultValue="", title="Features columns [a sequence of names of columns from PositiveCasesTable separated by | ]", identifier = "FeaturesColumns", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setFeaturesColumns(String data) {inputs.put("FeaturesColumns",data);}
|
||||
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,28 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="MAPS_COMPARISON", abstrakt="An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON", version = "1.1.0")
|
||||
public class MAPS_COMPARISON extends AbstractEcologicalEngineMapper implements IEvaluator{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Layer_1. First Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", defaultValue="", title="First Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", identifier = "Layer_1", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayer_1(String data) {inputs.put("Layer_1",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Layer_2. Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", defaultValue="", title="Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer", identifier = "Layer_2", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayer_2(String data) {inputs.put("Layer_2",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Z. value of Z. Default is 0, that means comparison will be at surface level", defaultValue="0", title="value of Z. Default is 0, that means comparison will be at surface level", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setZ(Integer data) {inputs.put("Z",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: ValuesComparisonThreshold. A comparison threshold for the values in the map. Null equals to 0.1", defaultValue="0.1", title="A comparison threshold for the values in the map. Null equals to 0.1", identifier = "ValuesComparisonThreshold", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setValuesComparisonThreshold(Double data) {inputs.put("ValuesComparisonThreshold",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex_1. First Layer Time Index. The default is the first", defaultValue="0", title="First Layer Time Index. The default is the first", identifier = "TimeIndex_1", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex_1(Integer data) {inputs.put("TimeIndex_1",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex_2. Second Layer Time Index. The default is the first", defaultValue="0", title="Second Layer Time Index. The default is the first", identifier = "TimeIndex_2", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex_2(Integer data) {inputs.put("TimeIndex_2",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: KThreshold. Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5", defaultValue="0.5", title="Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5", identifier = "KThreshold", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setKThreshold(Double data) {inputs.put("KThreshold",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Error Distribution", title="Error Distribution", identifier = "Error Distribution", binding = PngFileDataBinding.class) public GenericFileData getDistribution_of_the_Error() {URL url=null;try {url = new URL((String) outputs.get("Error Distribution")); return new GenericFileData(url.openStream(),"image/png");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,30 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="QUALITY_ANALYSIS", abstrakt="An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS", version = "1.1.0")
|
||||
public class QUALITY_ANALYSIS extends AbstractEcologicalEngineMapper implements IEvaluator{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: PositiveCasesTable. A Table containing positive cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", title="A Table containing positive cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", maxOccurs=1, minOccurs=1, identifier = "PositiveCasesTable", binding = GenericFileDataBinding.class) public void setPositiveCasesTable(GenericFileData file) {inputs.put("PositiveCasesTable",file);}
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: NegativeCasesTable. A Table containing negative cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", title="A Table containing negative cases [a http link to a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM]", maxOccurs=1, minOccurs=1, identifier = "NegativeCasesTable", binding = GenericFileDataBinding.class) public void setNegativeCasesTable(GenericFileData file) {inputs.put("NegativeCasesTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: PositiveCasesTableKeyColumn. Positive Cases Table Key Column [the name of a column from PositiveCasesTable]", defaultValue="csquarecode", title="Positive Cases Table Key Column [the name of a column from PositiveCasesTable]", identifier = "PositiveCasesTableKeyColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setPositiveCasesTableKeyColumn(String data) {inputs.put("PositiveCasesTableKeyColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: NegativeCasesTableKeyColumn. Negative Cases Table Key Column [the name of a column from NegativeCasesTable]", defaultValue="csquarecode", title="Negative Cases Table Key Column [the name of a column from NegativeCasesTable]", identifier = "NegativeCasesTableKeyColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setNegativeCasesTableKeyColumn(String data) {inputs.put("NegativeCasesTableKeyColumn",data);}
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: DistributionTable. A probability distribution table [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", title="A probability distribution table [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", maxOccurs=1, minOccurs=1, identifier = "DistributionTable", binding = GenericFileDataBinding.class) public void setDistributionTable(GenericFileData file) {inputs.put("DistributionTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: DistributionTableKeyColumn. Distribution Table Key Column [the name of a column from DistributionTable]", defaultValue="csquarecode", title="Distribution Table Key Column [the name of a column from DistributionTable]", identifier = "DistributionTableKeyColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setDistributionTableKeyColumn(String data) {inputs.put("DistributionTableKeyColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: DistributionTableProbabilityColumn. Distribution Table Probability Column [the name of a column from DistributionTable]", defaultValue="probability", title="Distribution Table Probability Column [the name of a column from DistributionTable]", identifier = "DistributionTableProbabilityColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setDistributionTableProbabilityColumn(String data) {inputs.put("DistributionTableProbabilityColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: PositiveThreshold. Positive acceptance threshold", defaultValue="0.8", title="Positive acceptance threshold", identifier = "PositiveThreshold", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setPositiveThreshold(String data) {inputs.put("PositiveThreshold",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: NegativeThreshold. Negative acceptance threshold", defaultValue="0.3", title="Negative acceptance threshold", identifier = "NegativeThreshold", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setNegativeThreshold(String data) {inputs.put("NegativeThreshold",data);}
|
||||
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,44 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="BIONYM", abstrakt="An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM", version = "1.1.0")
|
||||
public class BIONYM extends AbstractEcologicalEngineMapper implements IGenerator{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: RawTaxaNamesTable. Input table containing raw taxa names that you want to match [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Input table containing raw taxa names that you want to match [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "RawTaxaNamesTable", binding = GenericFileDataBinding.class) public void setRawTaxaNamesTable(GenericFileData file) {inputs.put("RawTaxaNamesTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: RawNamesColumn. The column containing the raw taxa names with or without authoship information [the name of a column from RawTaxaNamesTable]", defaultValue="rawnames", title="The column containing the raw taxa names with or without authoship information [the name of a column from RawTaxaNamesTable]", identifier = "RawNamesColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setRawNamesColumn(String data) {inputs.put("RawNamesColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. Name of the table which will contain the matches", defaultValue="bionout", title="Name of the table which will contain the matches", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Taxa_Authority_File. The reference dataset to use", allowedValues= {"ASFIS","FISHBASE","OBIS","OBIS_ANIMALIA","OBIS_CNIDARIA","OBIS_ECHINODERMATA","OBIS_PLATYHELMINTHES","COL_FULL","COL_CHORDATA","COL_MAMMALIA","IRMNG_ACTINOPTERYGII","WORMS_ANIMALIA","WORMS_PISCES"}, defaultValue="FISHBASE", title="The reference dataset to use", identifier = "Taxa_Authority_File", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setTaxa_Authority_File(String data) {inputs.put("Taxa_Authority_File",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Parser_Name. The Species - Authority parser", allowedValues= {"SIMPLE","GNI","NONE"}, defaultValue="SIMPLE", title="The Species - Authority parser", identifier = "Parser_Name", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setParser_Name(String data) {inputs.put("Parser_Name",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Activate_Preparsing_Processing. Use preparsing rules to correct common errors", defaultValue="true", allowedValues= {"true","false"}, title="Use preparsing rules to correct common errors", identifier = "Activate_Preparsing_Processing", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void setActivate_Preparsing_Processing(Boolean data) {inputs.put("Activate_Preparsing_Processing",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Use_Stemmed_Genus_and_Species. Process using Genus and Species names without declension", defaultValue="false", allowedValues= {"true","false"}, title="Process using Genus and Species names without declension", identifier = "Use_Stemmed_Genus_and_Species", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void setUse_Stemmed_Genus_and_Species(Boolean data) {inputs.put("Use_Stemmed_Genus_and_Species",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Accuracy_vs_Speed. A trade-off between recognition speed and accuracy. Max speed corresponds to search for strings with the same length only.", allowedValues= {"MAX_ACCURACY","LOW_SPEED","MEDIUM_SPEED","HIGH_SPEED","MAX_SPEED"}, defaultValue="MAX_ACCURACY", title="A trade-off between recognition speed and accuracy. Max speed corresponds to search for strings with the same length only.", identifier = "Accuracy_vs_Speed", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setAccuracy_vs_Speed(String data) {inputs.put("Accuracy_vs_Speed",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Matcher_1. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="GSAy", title="Choose a Matcher", identifier = "Matcher_1", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_1(String data) {inputs.put("Matcher_1",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Threshold_1. Threshold", defaultValue="0.6", title="Threshold", identifier = "Threshold_1", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_1(Double data) {inputs.put("Threshold_1",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_1. The maximum number of matching candidates per each raw input species", defaultValue="10", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_1", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_1(Integer data) {inputs.put("MaxResults_1",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Matcher_2. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="FUZZYMATCH", title="Choose a Matcher", identifier = "Matcher_2", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_2(String data) {inputs.put("Matcher_2",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Threshold_2. Threshold", defaultValue="0.6", title="Threshold", identifier = "Threshold_2", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_2(Double data) {inputs.put("Threshold_2",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_2. The maximum number of matching candidates per each raw input species", defaultValue="10", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_2", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_2(Integer data) {inputs.put("MaxResults_2",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Matcher_3. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="LEVENSHTEIN", title="Choose a Matcher", identifier = "Matcher_3", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_3(String data) {inputs.put("Matcher_3",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Threshold_3. Threshold", defaultValue="0.4", title="Threshold", identifier = "Threshold_3", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_3(Double data) {inputs.put("Threshold_3",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_3. The maximum number of matching candidates per each raw input species", defaultValue="5", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_3", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_3(Integer data) {inputs.put("MaxResults_3",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Matcher_4. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="TRIGRAM", title="Choose a Matcher", identifier = "Matcher_4", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_4(String data) {inputs.put("Matcher_4",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Threshold_4. Threshold", defaultValue="0.4", title="Threshold", identifier = "Threshold_4", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_4(Double data) {inputs.put("Threshold_4",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_4. The maximum number of matching candidates per each raw input species", defaultValue="5", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_4", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_4(Integer data) {inputs.put("MaxResults_4",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Matcher_5. Choose a Matcher (Optional)", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="NONE", title="Choose a Matcher (Optional)", identifier = "Matcher_5", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_5(String data) {inputs.put("Matcher_5",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Threshold_5. Threshold (def. 0.2)", defaultValue="0.2", title="Threshold (def. 0.2)", identifier = "Threshold_5", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_5(Double data) {inputs.put("Threshold_5",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_5. The maximum number of matching candidates per each raw input species", defaultValue="0", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_5", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_5(Integer data) {inputs.put("MaxResults_5",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,23 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="CMSY", abstrakt="An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY", version = "1.1.0")
|
||||
public class CMSY extends AbstractEcologicalEngineMapper implements IGenerator{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: IDsFile. Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK", defaultValue="", title="Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK", identifier = "IDsFile", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setIDsFile(String data) {inputs.put("IDsFile",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: StocksFile. Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY", defaultValue="", title="Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY", identifier = "StocksFile", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setStocksFile(String data) {inputs.put("StocksFile",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: SelectedStock. The stock on which the procedure has to focus e.g. HLH_M07", defaultValue="", title="The stock on which the procedure has to focus e.g. HLH_M07", identifier = "SelectedStock", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSelectedStock(String data) {inputs.put("SelectedStock",data);}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,32 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="ICCAT_VPA", abstrakt="An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA", version = "1.1.0")
|
||||
public class ICCAT_VPA extends AbstractEcologicalEngineMapper implements IGenerator{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: StartYear. First year of the dataset temporal extent", defaultValue="1950", title="First year of the dataset temporal extent", identifier = "StartYear", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setStartYear(Integer data) {inputs.put("StartYear",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: EndYear. Last year of the dataset temporal extent", defaultValue="2013", title="Last year of the dataset temporal extent", identifier = "EndYear", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setEndYear(Integer data) {inputs.put("EndYear",""+data);}
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: CAAFile. Catch at Age Matrix (Number of Fish caught by year and for each age)", title="Catch at Age Matrix (Number of Fish caught by year and for each age)", maxOccurs=1, minOccurs=1, identifier = "CAAFile", binding = D4ScienceDataInputBinding.class) public void setCAAFile(GenericFileData file) {inputs.put("CAAFile",file);}
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: PCAAFile. Partial Catch at Age Matrix (Number of Fish caught by gear and year and for each age)", title="Partial Catch at Age Matrix (Number of Fish caught by gear and year and for each age)", maxOccurs=1, minOccurs=1, identifier = "PCAAFile", binding = D4ScienceDataInputBinding.class) public void setPCAAFile(GenericFileData file) {inputs.put("PCAAFile",file);}
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: CPUEFile. Table of Catch Per Unit of Effort used in the stock assessment", title="Table of Catch Per Unit of Effort used in the stock assessment", maxOccurs=1, minOccurs=1, identifier = "CPUEFile", binding = D4ScienceDataInputBinding.class) public void setCPUEFile(GenericFileData file) {inputs.put("CPUEFile",file);}
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: PwaaFile. Partial weight at age (Weight of Fish caught by gear and year and for each age)", title="Partial weight at age (Weight of Fish caught by gear and year and for each age)", maxOccurs=1, minOccurs=1, identifier = "PwaaFile", binding = D4ScienceDataInputBinding.class) public void setPwaaFile(GenericFileData file) {inputs.put("PwaaFile",file);}
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: waaFile. Fecundity at age (Fecundity of Fish caught by year and for each age)", title="Fecundity at age (Fecundity of Fish caught by year and for each age)", maxOccurs=1, minOccurs=1, identifier = "waaFile", binding = D4ScienceDataInputBinding.class) public void setwaaFile(GenericFileData file) {inputs.put("waaFile",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: shortComment. Free text for users to describe the current simulation", defaultValue=" ", title="Free text for users to describe the current simulation", identifier = "shortComment", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setshortComment(String data) {inputs.put("shortComment",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: nCPUE. Number of Catch Per Unit of Effort Time series to use", defaultValue="7", title="Number of Catch Per Unit of Effort Time series to use", identifier = "nCPUE", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setnCPUE(Integer data) {inputs.put("nCPUE",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: CPUE_cut. Identifier of the Catch Per Unit of Effort Time Serie to be shrunk", defaultValue="1", title="Identifier of the Catch Per Unit of Effort Time Serie to be shrunk", identifier = "CPUE_cut", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setCPUE_cut(Integer data) {inputs.put("CPUE_cut",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: n_remove_year. Number of the (last) years to be removed", defaultValue="1", title="Number of the (last) years to be removed", identifier = "n_remove_year", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setn_remove_year(Integer data) {inputs.put("n_remove_year",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: age_plus_group. Maximal age class of catches to be taken into account", defaultValue="10", title="Maximal age class of catches to be taken into account", identifier = "age_plus_group", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setage_plus_group(Integer data) {inputs.put("age_plus_group",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,26 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="ABSENCE_CELLS_FROM_AQUAMAPS", abstrakt="An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS", version = "1.1.0")
|
||||
public class ABSENCE_CELLS_FROM_AQUAMAPS extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Table_Label. the name of the Filtered Hcaf", defaultValue="AbsenceCells_", title="the name of the Filtered Hcaf", identifier = "Table_Label", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setTable_Label(String data) {inputs.put("Table_Label",data);}
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: Aquamaps_HSPEC. an Aquamaps table from which to produce the absence points [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", title="an Aquamaps table from which to produce the absence points [a http link to a table in UTF-8 encoding following this template: (HSPEC) http://goo.gl/OvKa1h]", maxOccurs=1, minOccurs=1, identifier = "Aquamaps_HSPEC", binding = GenericFileDataBinding.class) public void setAquamaps_HSPEC(GenericFileData file) {inputs.put("Aquamaps_HSPEC",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Take_Randomly. a flag for taking points randomly (true) or close together (false)", defaultValue="true", allowedValues= {"true","false"}, title="a flag for taking points randomly (true) or close together (false)", identifier = "Take_Randomly", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void setTake_Randomly(Boolean data) {inputs.put("Take_Randomly",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Number_of_Points. number of points to take", defaultValue="20", title="number of points to take", identifier = "Number_of_Points", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setNumber_of_Points(Integer data) {inputs.put("Number_of_Points",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Species_Code. the species code according to the Fish-Base conventions", defaultValue="Fis-30189", title="the species code according to the Fish-Base conventions", identifier = "Species_Code", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSpecies_Code(String data) {inputs.put("Species_Code",data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. a HCAF table containing Absence Points cells [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM]", title="a HCAF table containing Absence Points cells [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,42 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
// BIONYM_LOCAL: WPS transducer wrapping the local (fast) BiOnym taxon-name
// matching workflow. Up to five matchers (Matcher_1..Matcher_5), each with a
// similarity threshold and a max-results cap, are applied to the input raw
// species name. Every setter below only stores the raw parameter value into
// the inherited "inputs" map under its WPS identifier; the computation itself
// is delegated to AbstractEcologicalEngineMapper.run().
@Algorithm(statusSupported=true, title="BIONYM_LOCAL", abstrakt="A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL", version = "1.1.0")
public class BIONYM_LOCAL extends AbstractEcologicalEngineMapper implements ITransducer{
// Raw scientific name to match, optionally including authorship.
@LiteralDataInput(abstrakt="Name of the parameter: SpeciesAuthorName. The scientific name of the species, possibly with authorship", defaultValue="Gadus morhua (Linnaeus, 1758)", title="The scientific name of the species, possibly with authorship", identifier = "SpeciesAuthorName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSpeciesAuthorName(String data) {inputs.put("SpeciesAuthorName",data);}
// Reference taxonomic authority dataset to match against.
@LiteralDataInput(abstrakt="Name of the parameter: Taxa_Authority_File. The reference dataset to use", allowedValues= {"ASFIS","FISHBASE","OBIS","OBIS_ANIMALIA","OBIS_CNIDARIA","OBIS_ECHINODERMATA","OBIS_PLATYHELMINTHES","COL_FULL","COL_CHORDATA","COL_MAMMALIA","IRMNG_ACTINOPTERYGII","WORMS_ANIMALIA","WORMS_PISCES"}, defaultValue="FISHBASE", title="The reference dataset to use", identifier = "Taxa_Authority_File", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setTaxa_Authority_File(String data) {inputs.put("Taxa_Authority_File",data);}
// Parser used to split the species name from its authority.
@LiteralDataInput(abstrakt="Name of the parameter: Parser_Name. The Species - Authority parser", allowedValues= {"SIMPLE","GNI","NONE"}, defaultValue="SIMPLE", title="The Species - Authority parser", identifier = "Parser_Name", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setParser_Name(String data) {inputs.put("Parser_Name",data);}
// Whether pre-parsing rules are applied to correct common input errors.
@LiteralDataInput(abstrakt="Name of the parameter: Activate_Preparsing_Processing. Use preparsing rules to correct common errors", defaultValue="true", allowedValues= {"true","false"}, title="Use preparsing rules to correct common errors", identifier = "Activate_Preparsing_Processing", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void setActivate_Preparsing_Processing(Boolean data) {inputs.put("Activate_Preparsing_Processing",""+data);}
// Whether genus/species names are stemmed (declension removed) before matching.
@LiteralDataInput(abstrakt="Name of the parameter: Use_Stemmed_Genus_and_Species. Process using Genus and Species names without declension", defaultValue="false", allowedValues= {"true","false"}, title="Process using Genus and Species names without declension", identifier = "Use_Stemmed_Genus_and_Species", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void setUse_Stemmed_Genus_and_Species(Boolean data) {inputs.put("Use_Stemmed_Genus_and_Species",""+data);}
// Speed/accuracy trade-off for candidate search.
@LiteralDataInput(abstrakt="Name of the parameter: Accuracy_vs_Speed. A trade-off between recognition speed and accuracy. Max speed corresponds to search for strings with the same length only.", allowedValues= {"MAX_ACCURACY","LOW_SPEED","MEDIUM_SPEED","HIGH_SPEED","MAX_SPEED"}, defaultValue="MAX_ACCURACY", title="A trade-off between recognition speed and accuracy. Max speed corresponds to search for strings with the same length only.", identifier = "Accuracy_vs_Speed", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setAccuracy_vs_Speed(String data) {inputs.put("Accuracy_vs_Speed",data);}
// Matcher stage 1: algorithm, similarity threshold, max candidates.
@LiteralDataInput(abstrakt="Name of the parameter: Matcher_1. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="GSAy", title="Choose a Matcher", identifier = "Matcher_1", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_1(String data) {inputs.put("Matcher_1",data);}
@LiteralDataInput(abstrakt="Name of the parameter: Threshold_1. Threshold", defaultValue="0.6", title="Threshold", identifier = "Threshold_1", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_1(Double data) {inputs.put("Threshold_1",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_1. The maximum number of matching candidates per each raw input species", defaultValue="10", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_1", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_1(Integer data) {inputs.put("MaxResults_1",""+data);}
// Matcher stage 2.
@LiteralDataInput(abstrakt="Name of the parameter: Matcher_2. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="FUZZYMATCH", title="Choose a Matcher", identifier = "Matcher_2", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_2(String data) {inputs.put("Matcher_2",data);}
@LiteralDataInput(abstrakt="Name of the parameter: Threshold_2. Threshold", defaultValue="0.6", title="Threshold", identifier = "Threshold_2", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_2(Double data) {inputs.put("Threshold_2",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_2. The maximum number of matching candidates per each raw input species", defaultValue="10", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_2", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_2(Integer data) {inputs.put("MaxResults_2",""+data);}
// Matcher stage 3.
@LiteralDataInput(abstrakt="Name of the parameter: Matcher_3. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="LEVENSHTEIN", title="Choose a Matcher", identifier = "Matcher_3", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_3(String data) {inputs.put("Matcher_3",data);}
@LiteralDataInput(abstrakt="Name of the parameter: Threshold_3. Threshold", defaultValue="0.4", title="Threshold", identifier = "Threshold_3", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_3(Double data) {inputs.put("Threshold_3",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_3. The maximum number of matching candidates per each raw input species", defaultValue="5", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_3", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_3(Integer data) {inputs.put("MaxResults_3",""+data);}
// Matcher stage 4.
@LiteralDataInput(abstrakt="Name of the parameter: Matcher_4. Choose a Matcher", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="TRIGRAM", title="Choose a Matcher", identifier = "Matcher_4", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_4(String data) {inputs.put("Matcher_4",data);}
@LiteralDataInput(abstrakt="Name of the parameter: Threshold_4. Threshold", defaultValue="0.4", title="Threshold", identifier = "Threshold_4", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_4(Double data) {inputs.put("Threshold_4",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_4. The maximum number of matching candidates per each raw input species", defaultValue="5", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_4", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_4(Integer data) {inputs.put("MaxResults_4",""+data);}
// Matcher stage 5 (optional; NONE disables it).
@LiteralDataInput(abstrakt="Name of the parameter: Matcher_5. Choose a Matcher (Optional)", allowedValues= {"GSAy","FUZZYMATCH","LEVENSHTEIN","SOUNDEX","LEV_SDX_TRIG","TRIGRAM","NONE"}, defaultValue="NONE", title="Choose a Matcher (Optional)", identifier = "Matcher_5", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setMatcher_5(String data) {inputs.put("Matcher_5",data);}
@LiteralDataInput(abstrakt="Name of the parameter: Threshold_5. Threshold (def. 0.2)", defaultValue="0.2", title="Threshold (def. 0.2)", identifier = "Threshold_5", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setThreshold_5(Double data) {inputs.put("Threshold_5",""+data);}
@LiteralDataInput(abstrakt="Name of the parameter: MaxResults_5. The maximum number of matching candidates per each raw input species", defaultValue="0", title="The maximum number of matching candidates per each raw input species", identifier = "MaxResults_5", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxResults_5(Integer data) {inputs.put("MaxResults_5",""+data);}
// Catch-all output slot for results that are not known in advance.
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
// WPS entry point; all work happens in the generic mapper superclass.
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,26 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="CSQUARE_COLUMN_CREATOR", abstrakt="An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR", version = "1.1.0")
|
||||
public class CSQUARE_COLUMN_CREATOR extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: InputTable. The table to which the algorithm adds the csquare column [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="The table to which the algorithm adds the csquare column [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "InputTable", binding = GenericFileDataBinding.class) public void setInputTable(GenericFileData file) {inputs.put("InputTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Longitude_Column. The column containing Longitude information [the name of a column from InputTable]", defaultValue="x", title="The column containing Longitude information [the name of a column from InputTable]", identifier = "Longitude_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLongitude_Column(String data) {inputs.put("Longitude_Column",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Latitude_Column. The column containing Latitude information [the name of a column from InputTable]", defaultValue="y", title="The column containing Latitude information [the name of a column from InputTable]", identifier = "Latitude_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLatitude_Column(String data) {inputs.put("Latitude_Column",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: CSquare_Resolution. The resolution of the CSquare codes", defaultValue="0.1", title="The resolution of the CSquare codes", identifier = "CSquare_Resolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setCSquare_Resolution(Double data) {inputs.put("CSquare_Resolution",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableName. The name of the output table", defaultValue="csquaretbl_", title="The name of the output table", identifier = "OutputTableName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableName(String data) {inputs.put("OutputTableName",data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,30 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
// ESRI_GRID_EXTRACTION: WPS transducer that samples an environmental layer
// (NetCDF, ASC, GeoTiff, WFS, WCS, ...) over a user-specified bounding box,
// z-level and time index, producing an ESRI GRID ASCII file. Every setter
// below only stores the raw parameter value into the inherited "inputs" map
// under its WPS identifier; the computation itself is delegated to
// AbstractEcologicalEngineMapper.run().
@Algorithm(statusSupported=true, title="ESRI_GRID_EXTRACTION", abstrakt="An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION", version = "1.1.0")
public class ESRI_GRID_EXTRACTION extends AbstractEcologicalEngineMapper implements ITransducer{
// Layer reference: GeoNetwork title/UUID or a direct HTTP link to the file.
@LiteralDataInput(abstrakt="Name of the parameter: Layer. Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", defaultValue="", title="Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", identifier = "Layer", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayer(String data) {inputs.put("Layer",data);}
// Bounding box: lower-left corner latitude.
@LiteralDataInput(abstrakt="Name of the parameter: BBox_LowerLeftLat. Lower Left Latitute of the Bounding Box", defaultValue="-60", title="Lower Left Latitute of the Bounding Box", identifier = "BBox_LowerLeftLat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_LowerLeftLat(Double data) {inputs.put("BBox_LowerLeftLat",""+data);}
// Bounding box: lower-left corner longitude.
@LiteralDataInput(abstrakt="Name of the parameter: BBox_LowerLeftLong. Lower Left Longitude of the Bounding Box", defaultValue="-50", title="Lower Left Longitude of the Bounding Box", identifier = "BBox_LowerLeftLong", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_LowerLeftLong(Double data) {inputs.put("BBox_LowerLeftLong",""+data);}
// Bounding box: upper-right corner latitude.
@LiteralDataInput(abstrakt="Name of the parameter: BBox_UpperRightLat. Upper Right Latitute of the Bounding Box", defaultValue="60", title="Upper Right Latitute of the Bounding Box", identifier = "BBox_UpperRightLat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_UpperRightLat(Double data) {inputs.put("BBox_UpperRightLat",""+data);}
// Bounding box: upper-right corner longitude.
@LiteralDataInput(abstrakt="Name of the parameter: BBox_UpperRightLong. Upper Right Longitude of the Bounding Box", defaultValue="50", title="Upper Right Longitude of the Bounding Box", identifier = "BBox_UpperRightLong", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_UpperRightLong(Double data) {inputs.put("BBox_UpperRightLong",""+data);}
// Z level (depth/altitude); 0 means surface or first available Z in the layer.
@LiteralDataInput(abstrakt="Name of the parameter: Z. Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", defaultValue="0", title="Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setZ(Double data) {inputs.put("Z",""+data);}
// Index into the layer's time dimension; 0 selects the first time slice.
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex. Time Index. The default is the first time indexed dataset", defaultValue="0", title="Time Index. The default is the first time indexed dataset", identifier = "TimeIndex", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex(Integer data) {inputs.put("TimeIndex",""+data);}
// Projection resolution along the X axis (degrees).
@LiteralDataInput(abstrakt="Name of the parameter: XResolution. Projection resolution on the X axis", defaultValue="0.5", title="Projection resolution on the X axis", identifier = "XResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setXResolution(Double data) {inputs.put("XResolution",""+data);}
// Projection resolution along the Y axis (degrees).
@LiteralDataInput(abstrakt="Name of the parameter: YResolution. Projection resolution on the Y axis", defaultValue="0.5", title="Projection resolution on the Y axis", identifier = "YResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setYResolution(Double data) {inputs.put("YResolution",""+data);}
// Downloads the produced ESRI GRID ASCII file from the URL stored in the
// inherited "outputs" map. Best-effort: failures are printed and null returned.
@ComplexDataOutput(abstrakt="Name of the parameter: Output ESRI GRID ASCII FILE. Output ESRI GRID ASCII FILE", title="Output ESRI GRID ASCII FILE", identifier = "Output ESRI GRID ASCII FILE", binding = D4ScienceFileDataBinding.class) public GenericFileData getOutput_ESRI_GRID_ASCII_FILE() {URL url=null;try {url = new URL((String) outputs.get("Output ESRI GRID ASCII FILE")); return new GenericFileData(url.openStream(),"application/d4science");} catch (Exception e) {e.printStackTrace();return null;}}
// Catch-all output slot for results that are not known in advance.
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
// WPS entry point; all work happens in the generic mapper superclass.
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,26 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="FAO_OCEAN_AREA_COLUMN_CREATOR", abstrakt="An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR", version = "1.1.0")
|
||||
public class FAO_OCEAN_AREA_COLUMN_CREATOR extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: InputTable. The table to which the algorithm adds the csquare column [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="The table to which the algorithm adds the csquare column [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "InputTable", binding = GenericFileDataBinding.class) public void setInputTable(GenericFileData file) {inputs.put("InputTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Longitude_Column. The column containing Longitude information [the name of a column from InputTable]", defaultValue="x", title="The column containing Longitude information [the name of a column from InputTable]", identifier = "Longitude_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLongitude_Column(String data) {inputs.put("Longitude_Column",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Latitude_Column. The column containing Latitude information [the name of a column from InputTable]", defaultValue="y", title="The column containing Latitude information [the name of a column from InputTable]", identifier = "Latitude_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLatitude_Column(String data) {inputs.put("Latitude_Column",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Resolution. The resolution of the FAO Ocean Area codes", defaultValue="5", title="The resolution of the FAO Ocean Area codes", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setResolution(Integer data) {inputs.put("Resolution",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableName. The name of the output table", defaultValue="faooceanarea_", title="The name of the output table", identifier = "OutputTableName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableName(String data) {inputs.put("OutputTableName",data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,27 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT", abstrakt="An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT", version = "1.1.0")
|
||||
public class FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: InputTable. The table to which the algorithm adds the csquare column [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="The table to which the algorithm adds the csquare column [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "InputTable", binding = GenericFileDataBinding.class) public void setInputTable(GenericFileData file) {inputs.put("InputTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Longitude_Column. The column containing Longitude information [the name of a column from InputTable]", defaultValue="x", title="The column containing Longitude information [the name of a column from InputTable]", identifier = "Longitude_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLongitude_Column(String data) {inputs.put("Longitude_Column",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Latitude_Column. The column containing Latitude information [the name of a column from InputTable]", defaultValue="y", title="The column containing Latitude information [the name of a column from InputTable]", identifier = "Latitude_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLatitude_Column(String data) {inputs.put("Latitude_Column",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Quadrant_Column. The column containing Quadrant information [the name of a column from InputTable]", defaultValue="quadrant", title="The column containing Quadrant information [the name of a column from InputTable]", identifier = "Quadrant_Column", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setQuadrant_Column(String data) {inputs.put("Quadrant_Column",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Resolution. The resolution of the FAO Ocean Area codes", defaultValue="5", title="The resolution of the FAO Ocean Area codes", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setResolution(Integer data) {inputs.put("Resolution",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableName. The name of the output table", defaultValue="faooceanarea_", title="The name of the output table", identifier = "OutputTableName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableName(String data) {inputs.put("OutputTableName",data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,25 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="GENERIC_CHARTS", abstrakt="An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS", version = "1.1.0")
|
||||
public class GENERIC_CHARTS extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: InputTable. The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "InputTable", binding = GenericFileDataBinding.class) public void setInputTable(GenericFileData file) {inputs.put("InputTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: TopElementsNumber. Max number of elements, with highest values, to visualize", defaultValue="10", title="Max number of elements, with highest values, to visualize", identifier = "TopElementsNumber", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTopElementsNumber(Integer data) {inputs.put("TopElementsNumber",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Attributes. The dimensions to consider in the charts [a sequence of names of columns from InputTable separated by | ]", defaultValue="", title="The dimensions to consider in the charts [a sequence of names of columns from InputTable separated by | ]", identifier = "Attributes", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setAttributes(String data) {inputs.put("Attributes",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Quantities. The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]", defaultValue="", title="The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]", identifier = "Quantities", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setQuantities(String data) {inputs.put("Quantities",data);}
|
||||
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,25 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="GEO_CHART", abstrakt="An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART", version = "1.1.0")
|
||||
public class GEO_CHART extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: InputTable. The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "InputTable", binding = GenericFileDataBinding.class) public void setInputTable(GenericFileData file) {inputs.put("InputTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Longitude. The column containing longitude decimal values [the name of a column from InputTable]", defaultValue="long", title="The column containing longitude decimal values [the name of a column from InputTable]", identifier = "Longitude", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLongitude(String data) {inputs.put("Longitude",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Latitude. The column containing latitude decimal values [the name of a column from InputTable]", defaultValue="lat", title="The column containing latitude decimal values [the name of a column from InputTable]", identifier = "Latitude", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLatitude(String data) {inputs.put("Latitude",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Quantities. The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]", defaultValue="", title="The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]", identifier = "Quantities", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setQuantities(String data) {inputs.put("Quantities",data);}
|
||||
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,26 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="HCAF_FILTER", abstrakt="An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia)", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER", version = "1.1.0")
|
||||
public class HCAF_FILTER extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Table_Label. the name of the Filtered Hcaf", defaultValue="hcaf_filtered", title="the name of the Filtered Hcaf", identifier = "Table_Label", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setTable_Label(String data) {inputs.put("Table_Label",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: B_Box_Left_Lower_Lat. the left lower latitude of the bounding box (range [-90,+90])", defaultValue="-17.098", title="the left lower latitude of the bounding box (range [-90,+90])", identifier = "B_Box_Left_Lower_Lat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setB_Box_Left_Lower_Lat(Double data) {inputs.put("B_Box_Left_Lower_Lat",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: B_Box_Left_Lower_Long. the left lower longitude of the bounding box (range [-180,+180])", defaultValue="89.245", title="the left lower longitude of the bounding box (range [-180,+180])", identifier = "B_Box_Left_Lower_Long", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setB_Box_Left_Lower_Long(Double data) {inputs.put("B_Box_Left_Lower_Long",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: B_Box_Right_Upper_Lat. the right upper latitude of the bounding box (range [-90,+90])", defaultValue="25.086", title="the right upper latitude of the bounding box (range [-90,+90])", identifier = "B_Box_Right_Upper_Lat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setB_Box_Right_Upper_Lat(Double data) {inputs.put("B_Box_Right_Upper_Lat",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: B_Box_Right_Upper_Long. the right upper longitude of the bounding box (range [-180,+180])", defaultValue="147.642", title="the right upper longitude of the bounding box (range [-180,+180])", identifier = "B_Box_Right_Upper_Long", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setB_Box_Right_Upper_Long(Double data) {inputs.put("B_Box_Right_Upper_Long",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. a HCAF table focusing on the selected Bounding Box [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM]", title="a HCAF table focusing on the selected Bounding Box [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,39 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="MAX_ENT_NICHE_MODELLING", abstrakt="A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING", version = "1.1.0")
|
||||
public class MAX_ENT_NICHE_MODELLING extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="maxent_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to", defaultValue="generic_species", title="The name of the species to model and the occurrence records refer to", identifier = "SpeciesName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSpeciesName(String data) {inputs.put("SpeciesName",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm", defaultValue="1000", title="The number of learning iterations of the MaxEnt algorithm", identifier = "MaxIterations", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setMaxIterations(Integer data) {inputs.put("MaxIterations",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points", defaultValue="0.5", title="A priori probability of presence at ordinary occurrence points", identifier = "DefaultPrevalence", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setDefaultPrevalence(Double data) {inputs.put("DefaultPrevalence",""+data);}
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5]", title="A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5]", maxOccurs=1, minOccurs=1, identifier = "OccurrencesTable", binding = GenericFileDataBinding.class) public void setOccurrencesTable(GenericFileData file) {inputs.put("OccurrencesTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable]", defaultValue="decimallongitude", title="The column containing longitude values [the name of a column from OccurrencesTable]", identifier = "LongitudeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLongitudeColumn(String data) {inputs.put("LongitudeColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable]", defaultValue="decimallatitude", title="The column containing latitude values [the name of a column from OccurrencesTable]", identifier = "LatitudeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLatitudeColumn(String data) {inputs.put("LatitudeColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees", defaultValue="1", title="Model projection resolution on the X axis in decimal degrees", identifier = "XResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setXResolution(Double data) {inputs.put("XResolution",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees", defaultValue="1", title="Model projection resolution on the Y axis in decimal degrees", identifier = "YResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setYResolution(Double data) {inputs.put("YResolution",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String)", defaultValue="", title="The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String)", identifier = "Layers", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayers(String data) {inputs.put("Layers",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer", defaultValue="0", title="Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setZ(Double data) {inputs.put("Z",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets", defaultValue="0", title="Time Index. The default is the first time indexed in the input environmental datasets", identifier = "TimeIndex", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex(Integer data) {inputs.put("TimeIndex",""+data);}
|
||||
@LiteralDataOutput(abstrakt="Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments", title="Best threshold for transforming MaxEnt values into 0/1 probability assignments", identifier = "Best Threshold", binding = LiteralStringBinding.class) public String getBest_Threshold() {return (String) outputs.get("Best Threshold");}
|
||||
@LiteralDataOutput(abstrakt="Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species", title="The a posteriori estimated prevalence of the species", identifier = "Estimated Prevalence", binding = LiteralStringBinding.class) public String getEstimated_Prevalence() {return (String) outputs.get("Estimated Prevalence");}
|
||||
@LiteralDataOutput(abstrakt="Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates", title="The contribution of each variable to the MaxEnt values estimates", identifier = "Variables contributions", binding = LiteralStringBinding.class) public String getVariables_contributions() {return (String) outputs.get("Variables contributions");}
|
||||
@LiteralDataOutput(abstrakt="Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training", title="The importance of the permutations of the variables during the training", identifier = "Variables Permutations Importance", binding = LiteralStringBinding.class) public String getVariables_Permutations_Importance() {return (String) outputs.get("Variables Permutations Importance");}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments", title="ASCII Maps of the environmental layers for checking features aligments", identifier = "ASCII Maps of the environmental layers for checking features aligments", binding = D4ScienceFileDataBinding.class) public GenericFileData getASCII_Maps_of_the_environmental_layers_for_checking_features_aligments() {URL url=null;try {url = new URL((String) outputs.get("ASCII Maps of the environmental layers for checking features aligments")); return new GenericFileData(url.openStream(),"application/d4science");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", identifier = "OutputTable7", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable7() {URL url=null;try {url = new URL((String) outputs.get("OutputTable7")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,23 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="OBIS_MOST_OBSERVED_SPECIES", abstrakt="An algorithm producing a bar chart for the most observed species in a certain years range (with respect to the OBIS database)", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_SPECIES", version = "1.1.0")
|
||||
public class OBIS_MOST_OBSERVED_SPECIES extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Species_number. Number of species to report (max 17 will be visualized on the chart)", defaultValue="10", title="Number of species to report (max 17 will be visualized on the chart)", identifier = "Species_number", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSpecies_number(String data) {inputs.put("Species_number",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Start_year. Starting year of the analysis", defaultValue="1800", title="Starting year of the analysis", identifier = "Start_year", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setStart_year(String data) {inputs.put("Start_year",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: End_year. Ending year of the analysis", defaultValue="2020", title="Ending year of the analysis", identifier = "End_year", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setEnd_year(String data) {inputs.put("End_year",data);}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,24 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="OBIS_MOST_OBSERVED_TAXA", abstrakt="An algorithm producing a bar chart for the most observed taxa in a certain years range (with respect to the OBIS database)", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_MOST_OBSERVED_TAXA", version = "1.1.0")
|
||||
public class OBIS_MOST_OBSERVED_TAXA extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Taxa_number. Number of taxa to report", defaultValue="10", title="Number of taxa to report", identifier = "Taxa_number", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setTaxa_number(String data) {inputs.put("Taxa_number",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Level. Choose the taxonomy level", allowedValues= {"GENUS","FAMILY","ORDER","CLASS"}, defaultValue="GENUS", title="Choose the taxonomy level", identifier = "Level", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLevel(String data) {inputs.put("Level",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Start_year. Starting year of the analysis", defaultValue="1800", title="Starting year of the analysis", identifier = "Start_year", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setStart_year(String data) {inputs.put("Start_year",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: End_year. Ending year of the analysis", defaultValue="2020", title="Ending year of the analysis", identifier = "End_year", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setEnd_year(String data) {inputs.put("End_year",data);}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,24 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA", abstrakt="An algorithm producing a bar chart for the distribution of a species along a certain type of marine area (e.g. LME or MEOW)", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA", version = "1.1.0")
|
||||
public class OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Species. The species to analyze", defaultValue="", title="The species to analyze", identifier = "Species", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSpecies(String data) {inputs.put("Species",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Area. Choose the area type", allowedValues= {"LME","MEOW"}, defaultValue="LME", title="Choose the area type", identifier = "Area", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setArea(String data) {inputs.put("Area",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Start_year. Starting year of the analysis", defaultValue="1800", title="Starting year of the analysis", identifier = "Start_year", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setStart_year(String data) {inputs.put("Start_year",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: End_year. Ending year of the analysis", defaultValue="2020", title=" Ending year of the analysis", identifier = "End_year", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setEnd_year(String data) {inputs.put("End_year",data);}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,24 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA", abstrakt="Algorithm returning most observed species in a specific years range (data collected from OBIS database).", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA", version = "1.1.0")
|
||||
public class OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Area_type. Choose the area name", allowedValues= {"AGULHAS CURRENT","ANTARCTICA","ARABIAN SEA","BALTIC SEA","BARENTS SEA","BAY OF BENGAL","BEAUFORT SEA","BENGUELA CURRENT","BLACK SEA","CALIFORNIA CURRENT","CANARY CURRENT","CARIBBEAN SEA","CELTIC-BISCAY SHELF","CHUKCHI SEA","EAST BERING SEA","EAST BRAZIL SHELF","EAST CENTRAL AUSTRALIAN SHELF","EAST CHINA SEA","EAST GREENLAND SHELF","EAST SIBERIAN SEA","FAROE PLATEAU","GUINEA CURRENT","GULF OF ALASKA","GULF OF CALIFORNIA","GULF OF MEXICO","GULF OF THAILAND","HUDSON BAY","HUMBOLDT CURRENT","IBERIAN COASTAL","ICELAND SHELF","INDONESIAN SEA","INSULAR PACIFIC-HAWAIIAN","KARA SEA","KUROSHIO CURRENT","LAPTEV SEA","MEDITERRANEAN SEA","NEWFOUNDLAND-LABRADOR SHELF","NEW ZEALAND SHELF","NORTH AUSTRALIAN SHELF","NORTH BRAZIL SHELF","NORTHEAST AUSTRALIAN SHELF","NORTHEAST U.S. CONTINENTAL SHELF","NORTH SEA","NORTHWEST AUSTRALIAN SHELF","NORWEGIAN SEA","OYASHIO CURRENT","PACIFIC CENTRAL-AMERICAN COASTAL","PATAGONIAN SHELF","RED SEA","SCOTIAN SHELF","SEA OF JAPAN","SEA OF OKHOTSK","SOMALI COASTAL CURRENT","SOUTH BRAZIL SHELF","SOUTH CHINA SEA","SOUTHEAST AUSTRALIAN SHELF","SOUTHEAST U.S. CONTINENTAL SHELF","SOUTHWEST AUSTRALIAN SHELF","SULU-CELEBES SEA","WEST BERING SEA","WEST CENTRAL AUSTRALIAN SHELF","WEST GREENLAND SHELF","YELLOW SEA"}, defaultValue="AGULHAS CURRENT", title="Choose the area name", identifier = "Area_type", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setArea_type(String data) {inputs.put("Area_type",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Start_year. Starting year of the analysis", defaultValue="1800", title="Starting year of the analysis", identifier = "Start_year", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setStart_year(String data) {inputs.put("Start_year",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: End_year. Ending year of the analysis", defaultValue="2020", title="Ending year of the analysis", identifier = "End_year", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setEnd_year(String data) {inputs.put("End_year",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Selected species. List of the species to analyze [a sequence of values separated by | ] (format: String)", defaultValue="", title="List of the species to analyze [a sequence of values separated by | ] (format: String)", identifier = "Selected species", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSelected_species(String data) {inputs.put("Selected species",data);}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
File diff suppressed because one or more lines are too long
|
@ -1,23 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="OBIS_SPECIES_OBSERVATIONS_PER_YEAR", abstrakt="An algorithm producing the trend of the observations for a certain species in a certain years range.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_SPECIES_OBSERVATIONS_PER_YEAR", version = "1.1.0")
|
||||
public class OBIS_SPECIES_OBSERVATIONS_PER_YEAR extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Start_year. Starting year of the analysis", defaultValue="1800", title="Starting year of the analysis", identifier = "Start_year", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setStart_year(String data) {inputs.put("Start_year",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: End_year. Ending year of the analysis", defaultValue="2020", title="Ending year of the analysis", identifier = "End_year", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setEnd_year(String data) {inputs.put("End_year",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Selected species. List of the species to analyze [a sequence of values separated by | ] (format: String)", defaultValue="", title="List of the species to analyze [a sequence of values separated by | ] (format: String)", identifier = "Selected species", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSelected_species(String data) {inputs.put("Selected species",data);}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,24 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="OBIS_TAXA_OBSERVATIONS_PER_YEAR", abstrakt="Algorithm returning most observations taxonomy trend in a specific years range (with respect to the OBIS database)", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OBIS_TAXA_OBSERVATIONS_PER_YEAR", version = "1.1.0")
|
||||
public class OBIS_TAXA_OBSERVATIONS_PER_YEAR extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Level. Choose the taxonomy level", allowedValues= {"GENUS","FAMILY","ORDER","CLASS"}, defaultValue="GENUS", title="Choose the taxonomy level", identifier = "Level", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLevel(String data) {inputs.put("Level",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Start_year. Starting year of the analysis", defaultValue="1800", title="Starting year of the analysis", identifier = "Start_year", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setStart_year(String data) {inputs.put("Start_year",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: End_year. Ending year of the analysis", defaultValue="2020", title="Ending year of the analysis", identifier = "End_year", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setEnd_year(String data) {inputs.put("End_year",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Selected taxonomy. List of taxa to analyze [a sequence of values separated by | ] (format: String)", defaultValue="", title="List of taxa to analyze [a sequence of values separated by | ] (format: String)", identifier = "Selected taxonomy", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSelected_taxonomy(String data) {inputs.put("Selected taxonomy",data);}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,31 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="OCCURRENCE_ENRICHMENT", abstrakt="An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT", version = "1.1.0")
|
||||
public class OCCURRENCE_ENRICHMENT extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: OccurrenceTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5]", title="A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5]", maxOccurs=1, minOccurs=1, identifier = "OccurrenceTable", binding = GenericFileDataBinding.class) public void setOccurrenceTable(GenericFileData file) {inputs.put("OccurrenceTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrenceTable]", defaultValue="decimallongitude", title="The column containing longitude values [the name of a column from OccurrenceTable]", identifier = "LongitudeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLongitudeColumn(String data) {inputs.put("LongitudeColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrenceTable]", defaultValue="decimallatitude", title="The column containing latitude values [the name of a column from OccurrenceTable]", identifier = "LatitudeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLatitudeColumn(String data) {inputs.put("LatitudeColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: ScientificNameColumn. The column containing Scientific Names [the name of a column from OccurrenceTable]", defaultValue="scientificname", title="The column containing Scientific Names [the name of a column from OccurrenceTable]", identifier = "ScientificNameColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setScientificNameColumn(String data) {inputs.put("ScientificNameColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: TimeColumn. The column containing time information [the name of a column from OccurrenceTable]", defaultValue="eventdate", title="The column containing time information [the name of a column from OccurrenceTable]", identifier = "TimeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setTimeColumn(String data) {inputs.put("TimeColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OptionalFilter. A filter on one of the columns (e.g. basisofrecord='HumanObservation'). Optional", defaultValue=" ", title="A filter on one of the columns (e.g. basisofrecord='HumanObservation'). Optional", identifier = "OptionalFilter", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOptionalFilter(String data) {inputs.put("OptionalFilter",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Resolution. The spatial resolution in degrees of the association between observations and environmental features", defaultValue="0.5", title="The spatial resolution in degrees of the association between observations and environmental features", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setResolution(Double data) {inputs.put("Resolution",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableName. The name of the output table", defaultValue="enrich_", title="The name of the output table", identifier = "OutputTableName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableName(String data) {inputs.put("OutputTableName",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ) [a sequence of values separated by | ] (format: String)", defaultValue="", title="The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ) [a sequence of values separated by | ] (format: String)", identifier = "Layers", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayers(String data) {inputs.put("Layers",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: FeaturesNames. The list of names for the columns corresponding to the environmental layers. These will be the column names of the resulting table [a sequence of values separated by | ] (format: String)", defaultValue="", title="The list of names for the columns corresponding to the environmental layers. These will be the column names of the resulting table [a sequence of values separated by | ] (format: String)", identifier = "FeaturesNames", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setFeaturesNames(String data) {inputs.put("FeaturesNames",data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,24 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="PRESENCE_CELLS_GENERATION", abstrakt="An algorithm producing cells and features (HCAF) for a species containing presence points", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION", version = "1.1.0")
|
||||
public class PRESENCE_CELLS_GENERATION extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Table_Label. the name of the Filtered Hcaf", defaultValue="PresenceCells_", title="the name of the Filtered Hcaf", identifier = "Table_Label", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setTable_Label(String data) {inputs.put("Table_Label",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Number_of_Points. Maximum number of points to take (-1 to take all)", defaultValue="-1", title="Maximum number of points to take (-1 to take all)", identifier = "Number_of_Points", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setNumber_of_Points(Integer data) {inputs.put("Number_of_Points",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Species_Code. the species code according to the Fish-Base conventions", defaultValue="Fis-30189", title="the species code according to the Fish-Base conventions", identifier = "Species_Code", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSpecies_Code(String data) {inputs.put("Species_Code",data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable. a HCAF table containing Presence Points cells [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM]", title="a HCAF table containing Presence Points cells [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM]", identifier = "OutputTable", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable() {URL url=null;try {url = new URL((String) outputs.get("OutputTable")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,33 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="SGVM_INTERPOLATION", abstrakt="An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION", version = "1.1.0")
|
||||
public class SGVM_INTERPOLATION extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: InputFile. Input file in TACSAT format. E.g. http://goo.gl/i16kPw", title="Input file in TACSAT format. E.g. http://goo.gl/i16kPw", maxOccurs=1, minOccurs=1, identifier = "InputFile", binding = D4ScienceDataInputBinding.class) public void setInputFile(GenericFileData file) {inputs.put("InputFile",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: npoints. The number of pings or positions required between each real or actual vessel position or ping", defaultValue="10", title="The number of pings or positions required between each real or actual vessel position or ping", identifier = "npoints", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setnpoints(Integer data) {inputs.put("npoints",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: interval. Average time in minutes between two adjacent datapoints", defaultValue="120", title="Average time in minutes between two adjacent datapoints", identifier = "interval", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setinterval(Integer data) {inputs.put("interval",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: margin. Maximum deviation from specified interval to find adjacent datapoints (tolerance)", defaultValue="10", title="Maximum deviation from specified interval to find adjacent datapoints (tolerance)", identifier = "margin", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setmargin(Integer data) {inputs.put("margin",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: res. Number of points to use to create interpolation (including start and end point)", defaultValue="100", title="Number of points to use to create interpolation (including start and end point)", identifier = "res", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setres(Integer data) {inputs.put("res",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: method. Set to cHs for cubic Hermite spline or SL for Straight Line interpolation", allowedValues= {"cHs","SL"}, defaultValue="cHs", title="Set to cHs for cubic Hermite spline or SL for Straight Line interpolation", identifier = "method", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setmethod(String data) {inputs.put("method",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: fm. The FM parameter in cubic interpolation", defaultValue="0.5", title="The FM parameter in cubic interpolation", identifier = "fm", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setfm(Double data) {inputs.put("fm",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: distscale. The DistScale parameter for cubic interpolation", defaultValue="20", title="The DistScale parameter for cubic interpolation", identifier = "distscale", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setdistscale(Integer data) {inputs.put("distscale",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: sigline. The Sigline parameter in cubic interpolation", defaultValue="0.2", title="The Sigline parameter in cubic interpolation", identifier = "sigline", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setsigline(Double data) {inputs.put("sigline",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: minspeedThr. A filter on the minimum speed to take into account for interpolation", defaultValue="2", title="A filter on the minimum speed to take into account for interpolation", identifier = "minspeedThr", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setminspeedThr(Double data) {inputs.put("minspeedThr",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: maxspeedThr. A filter on the maximum speed to take into account for interpolation", defaultValue="6", title="A filter on the maximum speed to take into account for interpolation", identifier = "maxspeedThr", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setmaxspeedThr(Double data) {inputs.put("maxspeedThr",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: headingAdjustment. Parameter to adjust the choice of heading depending on its own or previous point (0 or 1). Set 1 in case the heading at the endpoint does not represent the heading of the arriving vessel to that point but the departing vessel.", defaultValue="0", title="Parameter to adjust the choice of heading depending on its own or previous point (0 or 1). Set 1 in case the heading at the endpoint does not represent the heading of the arriving vessel to that point but the departing vessel.", identifier = "headingAdjustment", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setheadingAdjustment(Integer data) {inputs.put("headingAdjustment",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: equalDist. Whether the number of positions returned should be equally spaced or not", defaultValue="true", allowedValues= {"true","false"}, title="Whether the number of positions returned should be equally spaced or not", identifier = "equalDist", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void setequalDist(Boolean data) {inputs.put("equalDist",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,27 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="SUBMITQUERY", abstrakt="Algorithm that allows to submit a query", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY", version = "1.1.0")
|
||||
public class SUBMITQUERY extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: ResourceName. The name of the resource", defaultValue="", title="The name of the resource", identifier = "ResourceName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setResourceName(String data) {inputs.put("ResourceName",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: DatabaseName. The name of the database", defaultValue="", title="The name of the database", identifier = "DatabaseName", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setDatabaseName(String data) {inputs.put("DatabaseName",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Read-Only Query. Check the box if the query must be read-only", defaultValue="true", allowedValues= {"true","false"}, title="Check the box if the query must be read-only", identifier = "Read-Only Query", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void setRead_Only_Query(Boolean data) {inputs.put("Read-Only Query",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Apply Smart Correction. Check the box for smart correction", defaultValue="true", allowedValues= {"true","false"}, title="Check the box for smart correction", identifier = "Apply Smart Correction", maxOccurs=1, minOccurs=1,binding = LiteralBooleanBinding.class) public void setApply_Smart_Correction(Boolean data) {inputs.put("Apply Smart Correction",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Language. Language", allowedValues= {"NONE","POSTGRES","MYSQL"}, defaultValue="NONE", title="Language", identifier = "Language", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLanguage(String data) {inputs.put("Language",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Query. query", defaultValue="", title="query", identifier = "Query", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setQuery(String data) {inputs.put("Query",data);}
|
||||
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,29 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="TIMEEXTRACTION", abstrakt="An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION", version = "1.1.0")
|
||||
public class TIMEEXTRACTION extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Layer. Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", defaultValue="", title="Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", identifier = "Layer", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayer(String data) {inputs.put("Layer",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="extr_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: X. X coordinate", defaultValue="0", title="X coordinate", identifier = "X", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setX(Double data) {inputs.put("X",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Y. Y coordinate", defaultValue="0", title="Y coordinate", identifier = "Y", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setY(Double data) {inputs.put("Y",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Z. Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", defaultValue="0", title="Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setZ(Double data) {inputs.put("Z",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Resolution. Extraction point resolution", defaultValue="0.5", title="Extraction point resolution", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setResolution(Double data) {inputs.put("Resolution",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: SamplingFreq. Sampling frequency in Hz. Leave it to -1 if unknown or under 1", defaultValue="-1", title="Sampling frequency in Hz. Leave it to -1 if unknown or under 1", identifier = "SamplingFreq", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setSamplingFreq(Integer data) {inputs.put("SamplingFreq",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@LiteralDataOutput(abstrakt="Name of the parameter: Note. Note about the signal", title="Note about the signal", identifier = "Note", binding = LiteralStringBinding.class) public String getNote() {return (String) outputs.get("Note");}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,35 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="TIMEEXTRACTION_TABLE", abstrakt="An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE", version = "1.1.0")
|
||||
public class TIMEEXTRACTION_TABLE extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: geoReferencedTableName. A geospatial table containing at least x,y information [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="A geospatial table containing at least x,y information [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "geoReferencedTableName", binding = GenericFileDataBinding.class) public void setgeoReferencedTableName(GenericFileData file) {inputs.put("geoReferencedTableName",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: xColumn. The column containing x (longitude) information [the name of a column from geoReferencedTableName]", defaultValue="x", title="The column containing x (longitude) information [the name of a column from geoReferencedTableName]", identifier = "xColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setxColumn(String data) {inputs.put("xColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: yColumn. The column containing y (latitude) information [the name of a column from geoReferencedTableName]", defaultValue="y", title="The column containing y (latitude) information [the name of a column from geoReferencedTableName]", identifier = "yColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setyColumn(String data) {inputs.put("yColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: timeColumn. The column containing time information [the name of a column from geoReferencedTableName]", defaultValue="datetime", title="The column containing time information [the name of a column from geoReferencedTableName]", identifier = "timeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void settimeColumn(String data) {inputs.put("timeColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: valueColumn. A column containing real valued features [the name of a column from geoReferencedTableName]", defaultValue="value", title="A column containing real valued features [the name of a column from geoReferencedTableName]", identifier = "valueColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setvalueColumn(String data) {inputs.put("valueColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: filter. A filter on one of the columns (e.g. speed=2)", defaultValue=" ", title="A filter on one of the columns (e.g. speed=2)", identifier = "filter", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setfilter(String data) {inputs.put("filter",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: zColumn. The column containing z (altitude or depth) information (optional)", defaultValue="z", title="The column containing z (altitude or depth) information (optional)", identifier = "zColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setzColumn(String data) {inputs.put("zColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="extr_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: X. X coordinate", defaultValue="0", title="X coordinate", identifier = "X", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setX(Double data) {inputs.put("X",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Y. Y coordinate", defaultValue="0", title="Y coordinate", identifier = "Y", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setY(Double data) {inputs.put("Y",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Z. Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", defaultValue="0", title="Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setZ(Double data) {inputs.put("Z",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Resolution. Extraction point resolution", defaultValue="0.5", title="Extraction point resolution", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setResolution(Double data) {inputs.put("Resolution",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: SamplingFreq. Sampling frequency in Hz. Leave it to -1 if unknown or under 1", defaultValue="-1", title="Sampling frequency in Hz. Leave it to -1 if unknown or under 1", identifier = "SamplingFreq", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setSamplingFreq(Integer data) {inputs.put("SamplingFreq",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@LiteralDataOutput(abstrakt="Name of the parameter: Note. Note about the signal", title="Note about the signal", identifier = "Note", binding = LiteralStringBinding.class) public String getNote() {return (String) outputs.get("Note");}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,26 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="TIME_GEO_CHART", abstrakt="An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART", version = "1.1.0")
|
||||
public class TIME_GEO_CHART extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: InputTable. The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "InputTable", binding = GenericFileDataBinding.class) public void setInputTable(GenericFileData file) {inputs.put("InputTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Longitude. The column containing longitude decimal values [the name of a column from InputTable]", defaultValue="long", title="The column containing longitude decimal values [the name of a column from InputTable]", identifier = "Longitude", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLongitude(String data) {inputs.put("Longitude",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Latitude. The column containing latitude decimal values [the name of a column from InputTable]", defaultValue="lat", title="The column containing latitude decimal values [the name of a column from InputTable]", identifier = "Latitude", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLatitude(String data) {inputs.put("Latitude",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Quantities. The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]", defaultValue="", title="The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]", identifier = "Quantities", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setQuantities(String data) {inputs.put("Quantities",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Time. The column containing time information [the name of a column from InputTable]", defaultValue="year", title="The column containing time information [the name of a column from InputTable]", identifier = "Time", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setTime(String data) {inputs.put("Time",data);}
|
||||
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,29 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="TIME_SERIES_ANALYSIS", abstrakt="An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS", version = "1.1.0")
|
||||
public class TIME_SERIES_ANALYSIS extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: TimeSeriesTable. The table containing the time series [a http link to a table in UTF-8 encoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="The table containing the time series [a http link to a table in UTF-8 encoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", maxOccurs=1, minOccurs=1, identifier = "TimeSeriesTable", binding = GenericFileDataBinding.class) public void setTimeSeriesTable(GenericFileData file) {inputs.put("TimeSeriesTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: ValueColum. The column containing the values of the time series [the name of a column from TimeSeriesTable]", defaultValue="values", title="The column containing the values of the time series [the name of a column from TimeSeriesTable]", identifier = "ValueColum", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setValueColum(String data) {inputs.put("ValueColum",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: FFT_Window_Samples. The number of samples N on which the Fourier Transform (FFT) will be extracted. It should be a power of two and less than the signal length, otherwise it will be automatically recalculated. The FFT will be calculated every N/2 samples, taking N samples each time. The spectrogram will display the FFT on the slices of N samples.", defaultValue="12", title="The number of samples N on which the Fourier Transform (FFT) will be extracted. It should be a power of two and less than the signal length, otherwise it will be automatically recalculated. The FFT will be calculated every N/2 samples, taking N samples each time. The spectrogram will display the FFT on the slices of N samples.", identifier = "FFT_Window_Samples", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setFFT_Window_Samples(Integer data) {inputs.put("FFT_Window_Samples",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: AggregationFunction. Function to apply to samples with the same time instant", allowedValues= {"SUM","AVG"}, defaultValue="SUM", title="Function to apply to samples with the same time instant", identifier = "AggregationFunction", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setAggregationFunction(String data) {inputs.put("AggregationFunction",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Sensitivity. Sensitivity to the frequency components. High sensitivity will report all the frequency components, low sensitivity will report only the most distant ones.", allowedValues= {"LOW","NORMAL","HIGH"}, defaultValue="LOW", title="Sensitivity to the frequency components. High sensitivity will report all the frequency components, low sensitivity will report only the most distant ones.", identifier = "Sensitivity", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setSensitivity(String data) {inputs.put("Sensitivity",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: SSA_Window_in_Samples. The number of samples in the produced uniformly sampled signal, to use in the SSA algorithm. Must be strictly less than the Time Series length. This number should identify a portion of the signal long enough to make the system guess the nature of the trend", defaultValue="20", title="The number of samples in the produced uniformly sampled signal, to use in the SSA algorithm. Must be strictly less than the Time Series length. This number should identify a portion of the signal long enough to make the system guess the nature of the trend", identifier = "SSA_Window_in_Samples", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setSSA_Window_in_Samples(Integer data) {inputs.put("SSA_Window_in_Samples",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: SSA_EigenvaluesThreshold. The threshold under which an SSA eigenvalue will be ignored, along with its eigenvector, for the reconstruction of the signal", defaultValue="0.7", title="The threshold under which an SSA eigenvalue will be ignored, along with its eigenvector, for the reconstruction of the signal", identifier = "SSA_EigenvaluesThreshold", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setSSA_EigenvaluesThreshold(Double data) {inputs.put("SSA_EigenvaluesThreshold",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: SSA_Points_to_Forecast. The number of points to forecast over the original length of the time series", defaultValue="10", title="The number of points to forecast over the original length of the time series", identifier = "SSA_Points_to_Forecast", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setSSA_Points_to_Forecast(Integer data) {inputs.put("SSA_Points_to_Forecast",""+data);}
|
||||
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,25 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="TIME_SERIES_CHARTS", abstrakt="An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS", version = "1.1.0")
|
||||
public class TIME_SERIES_CHARTS extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: InputTable. The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "InputTable", binding = GenericFileDataBinding.class) public void setInputTable(GenericFileData file) {inputs.put("InputTable",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Attributes. The dimensions to consider in the charts [a sequence of names of columns from InputTable separated by | ]", defaultValue="", title="The dimensions to consider in the charts [a sequence of names of columns from InputTable separated by | ]", identifier = "Attributes", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setAttributes(String data) {inputs.put("Attributes",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Quantities. The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]", defaultValue="", title="The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ]", identifier = "Quantities", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setQuantities(String data) {inputs.put("Quantities",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Time. The column containing time information [the name of a column from InputTable]", defaultValue="year", title="The column containing time information [the name of a column from InputTable]", identifier = "Time", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setTime(String data) {inputs.put("Time",data);}
|
||||
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,31 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="XYEXTRACTOR", abstrakt="An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR", version = "1.1.0")
|
||||
public class XYEXTRACTOR extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Layer. Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", defaultValue="", title="Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", identifier = "Layer", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayer(String data) {inputs.put("Layer",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: BBox_LowerLeftLat. Lower Left Latitute of the Bounding Box", defaultValue="-60", title="Lower Left Latitute of the Bounding Box", identifier = "BBox_LowerLeftLat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_LowerLeftLat(Double data) {inputs.put("BBox_LowerLeftLat",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: BBox_LowerLeftLong. Lower Left Longitude of the Bounding Box", defaultValue="-50", title="Lower Left Longitude of the Bounding Box", identifier = "BBox_LowerLeftLong", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_LowerLeftLong(Double data) {inputs.put("BBox_LowerLeftLong",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: BBox_UpperRightLat. Upper Right Latitute of the Bounding Box", defaultValue="60", title="Upper Right Latitute of the Bounding Box", identifier = "BBox_UpperRightLat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_UpperRightLat(Double data) {inputs.put("BBox_UpperRightLat",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: BBox_UpperRightLong. Upper Right Longitude of the Bounding Box", defaultValue="50", title="Upper Right Longitude of the Bounding Box", identifier = "BBox_UpperRightLong", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_UpperRightLong(Double data) {inputs.put("BBox_UpperRightLong",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="extr_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Z. Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", defaultValue="0", title="Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setZ(Double data) {inputs.put("Z",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex. Time Index. The default is the first time indexed dataset", defaultValue="0", title="Time Index. The default is the first time indexed dataset", identifier = "TimeIndex", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex(Integer data) {inputs.put("TimeIndex",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: XResolution. Projection resolution on the X axis", defaultValue="0.5", title="Projection resolution on the X axis", identifier = "XResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setXResolution(Double data) {inputs.put("XResolution",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: YResolution. Projection resolution on the Y axis", defaultValue="0.5", title="Projection resolution on the Y axis", identifier = "YResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setYResolution(Double data) {inputs.put("YResolution",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,37 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="XYEXTRACTOR_TABLE", abstrakt="An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE", version = "1.1.0")
|
||||
public class XYEXTRACTOR_TABLE extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: geoReferencedTableName. A geospatial table containing at least x,y information [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="A geospatial table containing at least x,y information [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "geoReferencedTableName", binding = GenericFileDataBinding.class) public void setgeoReferencedTableName(GenericFileData file) {inputs.put("geoReferencedTableName",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: xColumn. The column containing x (longitude) information [the name of a column from geoReferencedTableName]", defaultValue="x", title="The column containing x (longitude) information [the name of a column from geoReferencedTableName]", identifier = "xColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setxColumn(String data) {inputs.put("xColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: yColumn. The column containing y (latitude) information [the name of a column from geoReferencedTableName]", defaultValue="y", title="The column containing y (latitude) information [the name of a column from geoReferencedTableName]", identifier = "yColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setyColumn(String data) {inputs.put("yColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: valueColumn. A column containing real valued features [the name of a column from geoReferencedTableName]", defaultValue="value", title="A column containing real valued features [the name of a column from geoReferencedTableName]", identifier = "valueColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setvalueColumn(String data) {inputs.put("valueColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: filter. A filter on one of the columns (e.g. speed=2)", defaultValue=" ", title="A filter on one of the columns (e.g. speed=2)", identifier = "filter", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setfilter(String data) {inputs.put("filter",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: zColumn. The column containing z (altitude or depth) information (optional)", defaultValue="z", title="The column containing z (altitude or depth) information (optional)", identifier = "zColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setzColumn(String data) {inputs.put("zColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: timeColumn. The column containing time (otional)", defaultValue="datetime", title="The column containing time (otional)", identifier = "timeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void settimeColumn(String data) {inputs.put("timeColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: BBox_LowerLeftLat. Lower Left Latitute of the Bounding Box", defaultValue="-60", title="Lower Left Latitute of the Bounding Box", identifier = "BBox_LowerLeftLat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_LowerLeftLat(Double data) {inputs.put("BBox_LowerLeftLat",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: BBox_LowerLeftLong. Lower Left Longitude of the Bounding Box", defaultValue="-50", title="Lower Left Longitude of the Bounding Box", identifier = "BBox_LowerLeftLong", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_LowerLeftLong(Double data) {inputs.put("BBox_LowerLeftLong",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: BBox_UpperRightLat. Upper Right Latitute of the Bounding Box", defaultValue="60", title="Upper Right Latitute of the Bounding Box", identifier = "BBox_UpperRightLat", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_UpperRightLat(Double data) {inputs.put("BBox_UpperRightLat",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: BBox_UpperRightLong. Upper Right Longitude of the Bounding Box", defaultValue="50", title="Upper Right Longitude of the Bounding Box", identifier = "BBox_UpperRightLong", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setBBox_UpperRightLong(Double data) {inputs.put("BBox_UpperRightLong",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="extr_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Z. Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", defaultValue="0", title="Value of Z. Default is 0, that means processing will be at surface level or at the first avaliable Z value in the layer", identifier = "Z", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setZ(Double data) {inputs.put("Z",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex. Time Index. The default is the first time indexed dataset", defaultValue="0", title="Time Index. The default is the first time indexed dataset", identifier = "TimeIndex", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex(Integer data) {inputs.put("TimeIndex",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: XResolution. Projection resolution on the X axis", defaultValue="0.5", title="Projection resolution on the X axis", identifier = "XResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setXResolution(Double data) {inputs.put("XResolution",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: YResolution. Projection resolution on the Y axis", defaultValue="0.5", title="Projection resolution on the Y axis", identifier = "YResolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setYResolution(Double data) {inputs.put("YResolution",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,27 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="ZEXTRACTION", abstrakt="An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION", version = "1.1.0")
|
||||
public class ZEXTRACTION extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Layer. Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", defaultValue="", title="Layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS ASC, GeoTiff )", identifier = "Layer", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setLayer(String data) {inputs.put("Layer",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="extr_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: X. X coordinate", defaultValue="0", title="X coordinate", identifier = "X", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setX(Double data) {inputs.put("X",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Y. Y coordinate", defaultValue="0", title="Y coordinate", identifier = "Y", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setY(Double data) {inputs.put("Y",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex. Time Index. The default is the first time indexed dataset", defaultValue="0", title="Time Index. The default is the first time indexed dataset", identifier = "TimeIndex", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex(Integer data) {inputs.put("TimeIndex",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Resolution. Step for Z values", defaultValue="100", title="Step for Z values", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setResolution(Double data) {inputs.put("Resolution",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,33 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.io.StringWriter;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.bindings.*;
|
||||
import org.n52.wps.algorithm.annotation.*;
|
||||
import org.n52.wps.io.data.*;
|
||||
import org.n52.wps.io.data.binding.complex.*;
|
||||
import org.n52.wps.io.data.binding.literal.*;
|
||||
import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;import org.n52.wps.server.*;import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.*;
|
||||
@Algorithm(statusSupported=true, title="ZEXTRACTION_TABLE", abstrakt="An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram.", identifier="org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE", version = "1.1.0")
|
||||
public class ZEXTRACTION_TABLE extends AbstractEcologicalEngineMapper implements ITransducer{
|
||||
@ComplexDataInput(abstrakt="Name of the parameter: geoReferencedTableName. A geospatial table containing at least x,y information [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", title="A geospatial table containing at least x,y information [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding]", maxOccurs=1, minOccurs=1, identifier = "geoReferencedTableName", binding = GenericFileDataBinding.class) public void setgeoReferencedTableName(GenericFileData file) {inputs.put("geoReferencedTableName",file);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: xColumn. The column containing x (longitude) information [the name of a column from geoReferencedTableName]", defaultValue="x", title="The column containing x (longitude) information [the name of a column from geoReferencedTableName]", identifier = "xColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setxColumn(String data) {inputs.put("xColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: yColumn. The column containing y (latitude) information [the name of a column from geoReferencedTableName]", defaultValue="y", title="The column containing y (latitude) information [the name of a column from geoReferencedTableName]", identifier = "yColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setyColumn(String data) {inputs.put("yColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: zColumn. The column containing z information [the name of a column from geoReferencedTableName]", defaultValue="z", title="The column containing z information [the name of a column from geoReferencedTableName]", identifier = "zColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setzColumn(String data) {inputs.put("zColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: valueColumn. A column containing real valued features [the name of a column from geoReferencedTableName]", defaultValue="value", title="A column containing real valued features [the name of a column from geoReferencedTableName]", identifier = "valueColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setvalueColumn(String data) {inputs.put("valueColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: filter. A filter on one of the columns (e.g. speed=2)", defaultValue=" ", title="A filter on one of the columns (e.g. speed=2)", identifier = "filter", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setfilter(String data) {inputs.put("filter",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: timeColumn. The column containing time information (optional).", defaultValue="time", title="The column containing time information (optional).", identifier = "timeColumn", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void settimeColumn(String data) {inputs.put("timeColumn",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: OutputTableLabel. The name of the table to produce", defaultValue="extr_", title="The name of the table to produce", identifier = "OutputTableLabel", maxOccurs=1, minOccurs=1, binding = LiteralStringBinding.class) public void setOutputTableLabel(String data) {inputs.put("OutputTableLabel",data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: X. X coordinate", defaultValue="0", title="X coordinate", identifier = "X", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setX(Double data) {inputs.put("X",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Y. Y coordinate", defaultValue="0", title="Y coordinate", identifier = "Y", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setY(Double data) {inputs.put("Y",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: TimeIndex. Time Index. The default is the first time indexed dataset", defaultValue="0", title="Time Index. The default is the first time indexed dataset", identifier = "TimeIndex", maxOccurs=1, minOccurs=1, binding = LiteralIntBinding.class) public void setTimeIndex(Integer data) {inputs.put("TimeIndex",""+data);}
|
||||
@LiteralDataInput(abstrakt="Name of the parameter: Resolution. Step for Z values", defaultValue="100", title="Step for Z values", identifier = "Resolution", maxOccurs=1, minOccurs=1, binding = LiteralDoubleBinding.class) public void setResolution(Double data) {inputs.put("Resolution",""+data);}
|
||||
@ComplexDataOutput(abstrakt="Name of the parameter: OutputTable1. Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", title="Output table [a http link to a table in UTF-8 ecoding following this template: (TIMESERIES) http://goo.gl/DoW6fg]", identifier = "OutputTable1", binding = CsvFileDataBinding.class) public GenericFileData getOutputTable1() {URL url=null;try {url = new URL((String) outputs.get("OutputTable1")); return new GenericFileData(url.openStream(),"text/csv");} catch (Exception e) {e.printStackTrace();return null;}}
|
||||
@ComplexDataOutput(abstrakt="Output that is not predetermined", title="NonDeterministicOutput", identifier = "non_deterministic_output", binding = GenericXMLDataBinding.class)
|
||||
public XmlObject getNon_deterministic_output() {return (XmlObject) outputs.get("non_deterministic_output");}
|
||||
@Execute public void run() throws Exception { super.run(); } }
|
|
@ -1,514 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.DatabaseInfo;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.InfrastructureDialoguer;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.TableCoherenceChecker;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.IClusterer;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.IEvaluator;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.IGenerator;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.IModeller;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.ITransducer;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.ComputationData;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.DataspaceManager;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.StoredData;
|
||||
import org.hibernate.SessionFactory;
|
||||
import org.n52.wps.algorithm.annotation.Execute;
|
||||
import org.n52.wps.server.AbstractAnnotatedAlgorithm;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
//DONE empty scope is not permitted
|
||||
// DONE integrate transducerers on WPS
|
||||
// DONE test transducerers on WPS
|
||||
// DONE integrate charts
|
||||
// DONE integrate evaluators
|
||||
// DONE integrate clusterers
|
||||
// DONE integrate models
|
||||
// DONE integrate generators
|
||||
// DONE distinguish algorithms according to their belonging package
|
||||
// DONE manage models and generators search
|
||||
// DONE enable generators when the properties file is fulfilled
|
||||
// DONE Manage and test computations on D4Science
|
||||
// DONE Introduce the "user name" parameter for all the computations
|
||||
// DONE memory cache of the database parameters
|
||||
// DONE test HRS on the WPS service
|
||||
// DONE build a checker for the default values
|
||||
// DONE Check algorithms descriptions on the WPS service
|
||||
// DONE Multi-scope GetCapabilities
|
||||
// DONE delete localhost from GetCapabilities
|
||||
// DONE Limit the maximum number of acceptable computations (implementing an
|
||||
// internal queue mechanism)
|
||||
// DONE Add internal parameters: max number of computations and use storage
|
||||
// DONE test with http links from the workspace
|
||||
// DONE delete temporary files
|
||||
// DONE Delete output tables
|
||||
// DONE before deleting a table, check if it is not a system table
|
||||
// DONE add time stamps!
|
||||
// DONE store on storage manager if use storage is enabled: https://gcube.wiki.gcube-system.org/gcube/index.php/URI_Resolver
|
||||
// DONE test of algorithms on the remote machine
|
||||
// DONE solve the security issue on the server when using scope=/d4science.research-infrastructures.eu/gCubeApps
|
||||
// WONTFIX Evaluation by the Taverna team
|
||||
// TODO delete user and scope parameters from the algorithms
|
||||
// TODO include WS links in the output
|
||||
// WONTFIX manage Gis Links
|
||||
// WONTFIX Manage the order of the inputs in the WPS description (currently not supported by 52 N)
// TODO manage status
|
||||
|
||||
/**
 * Base class for all generated WPS algorithm wrappers (transducers, clusterers,
 * evaluators, generators, modellers). It bridges the 52North annotated-algorithm
 * runtime and the gCube ecological engine: the generated subclass fills the
 * {@link #inputs} map through its annotated setters, {@link #run()} orchestrates
 * configuration, input translation, execution and output mapping, and the subclass'
 * annotated getters read the results back from {@link #outputs}.
 *
 * Deploying procedure:
 * 1 - modify configuration files
 * 2 - modify resource file: resources/templates/setup.cfg
 * 3 - generate classes with ClassGenerator
 * 4 - add new classes in the wps_config.xml on the wps web app config folder
 * 5 - produce the Jar file of this project
 * 6 - copy the jar file in the lib folder of the wps web app and change the server
 *     parameters in the wps_config.xml file
 */
public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm {

	// Inputs and outputs exchanged with the generated subclass. LinkedHashMap keeps
	// insertion order, which is the order used when building the WPS response.
	public LinkedHashMap<String, Object> inputs = new LinkedHashMap<String, Object>();
	public LinkedHashMap<String, Object> outputs = new LinkedHashMap<String, Object>();
	// Phase label -> absolute timestamp (ms) of phase completion; see time()/displayTimes().
	public LinkedHashMap<String, Long> times = new LinkedHashMap<String, Long>();
	// Human-readable start/end timestamps ("dd/MM/yyyy HH:mm:ss"), set by run().
	public String startTime;
	public String endTime;
	// JVM-wide, per-scope cache of the central database credentials (guarded by the
	// static synchronized accessors below).
	public static HashMap<String, DatabaseInfo> databaseParametersMemoryCache = new HashMap<String, DatabaseInfo>();
	// JVM-wide registry: computation session id -> "user:scope", used for throttling.
	public static HashMap<String, String> runningcomputations = new HashMap<String, String>();
	// The ecological-engine agent executing the current computation; nulled on
	// interruption and cleanup.
	ComputationalAgent agent;
	// External WPS request id; when present it makes the computation session id stable.
	public String wpsExternalID = null;
	// Provenance record of the computation in progress, mirrored to the workspace.
	ComputationData currentComputation;

	/** Sets the external WPS request id used to build the computation session id. */
	public void setWpsExternalID(String wpsExternalID) {
		this.wpsExternalID = wpsExternalID;
	}

	/** Registers a computation in the JVM-wide running-computations registry. */
	public static synchronized void addComputation(String session, String user) {
		runningcomputations.put(session, user);
	}

	/** Removes a finished (or failed) computation from the registry. */
	public static synchronized void removeComputation(String session) {
		runningcomputations.remove(session);
	}

	// NOTE(review): pre-existing typo in the method name ("Runing"); kept because
	// callers may rely on it.
	public static synchronized int getRuningComputations() {
		return runningcomputations.size();
	}

	/** Returns a printable snapshot of the running-computations registry. */
	public static synchronized String displayRunningComputations() {
		return runningcomputations.toString();
	}

	/**
	 * Blocks (polling every 2 s) until the number of running computations drops to
	 * the configured maximum. Throws InterruptedException if the wait is interrupted.
	 */
	public void waitForResources() throws Exception {
		while (getRuningComputations() > ConfigurationManager.getMaxComputations()) {
			Thread.sleep(2000);
			AnalysisLogger.getLogger().debug("Waiting for resources to be available: " + displayRunningComputations());
		}

	}

	// Inner objects built per computation by run().
	public AlgorithmConfiguration config;
	public InfrastructureDialoguer infrastructureDialoguer;

	/** Returns the cached central-database credentials for a scope, or null. */
	public static synchronized DatabaseInfo getDatabaseInfo(String scope) {
		return databaseParametersMemoryCache.get(scope);
	}

	/** Caches the central-database credentials for a scope. */
	public static synchronized void addDatabaseInfo(String scope, DatabaseInfo info) {
		databaseParametersMemoryCache.put(scope, info);
	}

	/**
	 * Looks up the ecological-engine agent for this wrapper, dispatching on the marker
	 * interface implemented by the generated subclass (ITransducer, IClusterer, ...).
	 * Returns the first matching agent configured with {@link #config}, or null when
	 * no agent is found.
	 */
	public ComputationalAgent getComputationalAgent(String algorithmName) throws Exception {
		AnalysisLogger.getLogger().debug("Searching for Agents.. " + algorithmName);
		List<ComputationalAgent> agents = new ArrayList<ComputationalAgent>();

		if (this instanceof ITransducer)
			agents = TransducerersFactory.getTransducerers(config);
		else if (this instanceof IClusterer)
			agents = ClusterersFactory.getClusterers(config);
		else if (this instanceof IEvaluator)
			agents = EvaluatorsFactory.getEvaluators(config);
		else if (this instanceof IGenerator)
			agents = GeneratorsFactory.getGenerators(config);
		else if (this instanceof IModeller)
			agents = ModelersFactory.getModelers(config);

		if (agents != null && agents.size() > 0 && agents.get(0) != null) {
			AnalysisLogger.getLogger().debug("Found " + agents.size() + " Agents for " + algorithmName);
			ComputationalAgent agent = agents.get(0);
			agent.setConfiguration(config);
			return agent;
		} else
			return null;
	}

	/**
	 * Retrieves the declared input parameters of the algorithm, dispatching on the
	 * subclass' marker interface as in getComputationalAgent(). Returns null when
	 * the factory yields none.
	 */
	public List<StatisticalType> getInputParameters(String algorithmName) throws Exception {
		AnalysisLogger.getLogger().debug("Searching for Agents Inputs.. " + algorithmName);
		List<StatisticalType> parameters = new ArrayList<StatisticalType>();

		if (this instanceof ITransducer)
			parameters = TransducerersFactory.getTransducerParameters(config, algorithmName);
		else if (this instanceof IClusterer)
			parameters = ClusterersFactory.getClustererParameters(config.getConfigPath(), algorithmName);
		else if (this instanceof IEvaluator)
			parameters = EvaluatorsFactory.getEvaluatorParameters(config.getConfigPath(), algorithmName);
		else if (this instanceof IGenerator)
			parameters = GeneratorsFactory.getAlgorithmParameters(config.getConfigPath(), algorithmName);
		else if (this instanceof IModeller)
			parameters = ModelersFactory.getModelParameters(config.getConfigPath(), algorithmName);

		if (parameters != null) {
			AnalysisLogger.getLogger().debug("Found " + parameters.size() + " Parameters for " + algorithmName);
			return parameters;
		} else
			return null;
	}

	/**
	 * Retrieves the a-priori declared output of the algorithm, dispatching on the
	 * subclass' marker interface. May return null when no output is declared.
	 */
	public StatisticalType getOutput(String algorithmName) throws Exception {
		AnalysisLogger.getLogger().debug("Searching for Agents Inputs.. " + algorithmName);
		StatisticalType output = null;

		if (this instanceof ITransducer)
			output = TransducerersFactory.getTransducerOutput(config, algorithmName);
		else if (this instanceof IClusterer)
			output = ClusterersFactory.getClustererOutput(config.getConfigPath(), algorithmName);
		else if (this instanceof IEvaluator)
			output = EvaluatorsFactory.getEvaluatorOutput(config.getConfigPath(), algorithmName);
		else if (this instanceof IGenerator)
			output = GeneratorsFactory.getAlgorithmOutput(config.getConfigPath(), algorithmName);
		else if (this instanceof IModeller)
			output = ModelersFactory.getModelOutput(config.getConfigPath(), algorithmName);

		if (output != null) {
			AnalysisLogger.getLogger().debug("Found " + output + " for " + algorithmName);
			return output;
		}
		return output;
	}

	/**
	 * Drops the given temporary tables from the computation database, skipping system
	 * tables and null entries. Best-effort: a failed drop is logged and does not stop
	 * the loop; the DB session is always closed in the finally block.
	 */
	public void deleteTemporaryTables(List<String> generatedInputTables) throws Exception {

		if (generatedInputTables != null && generatedInputTables.size() > 0) {
			SessionFactory dbConnection = null;
			try {
				dbConnection = DatabaseUtils.initDBSession(config);

				for (String table : generatedInputTables) {
					if (table != null) {
						// never drop infrastructure/system tables
						if (TableCoherenceChecker.isSystemTable(table))
							continue;
						AnalysisLogger.getLogger().debug("Dropping Temporary Table: " + table);
						try {
							DatabaseFactory.executeSQLUpdate("drop table " + table, dbConnection);
						} catch (Exception e) {
							AnalysisLogger.getLogger().debug("Could not drop Temporary Table: " + table);
						}
					} else
						AnalysisLogger.getLogger().debug("Could not drop Temporary Table: " + table + " table is null");
				}
			} catch (Exception e) {
				e.printStackTrace();
				AnalysisLogger.getLogger().debug(e);
			} finally {
				DatabaseUtils.closeDBConnection(dbConnection);
			}
		}
	}

	/**
	 * Deletes the given local files if they exist, logging each outcome.
	 * Best-effort: a failed delete is only reported in the log.
	 */
	public static void deleteGeneratedFiles(List<File> generatedFiles) throws Exception {
		if (generatedFiles != null) {
			for (File file : generatedFiles) {
				if (file.exists()) {
					AnalysisLogger.getLogger().debug("Deleting File " + file.getAbsolutePath());
					AnalysisLogger.getLogger().debug("Deleting File Check " + file.delete());
				} else
					AnalysisLogger.getLogger().debug("File does not exist " + file.getAbsolutePath());
			}
		}
	}

	/**
	 * Resolves the caller's scope and user name from the SmartGears token and stores
	 * them into {@link #inputs} under the standard parameter keys, so that the
	 * configuration step can pick them up.
	 */
	public void manageUserToken() {
		String scope = null;
		String username = null;
		// DONE get scope and username from SmartGears
		// get scope from SmartGears
		TokenManager tokenm = new TokenManager();
		tokenm.getCredentials();
		scope = tokenm.getScope();
		username = tokenm.getUserName();
		// set parameters
		inputs.put(ConfigurationManager.scopeParameter, scope);
		inputs.put(ConfigurationManager.usernameParameter, username);
	}

	// Last observed size (bytes) of the stored WPS response; 0 = not yet sampled,
	// -1 = computation was interrupted.
	long statusInterrupt = 0;
	// Last status value pushed to the WPS framework, to avoid duplicate updates.
	float previousStatus = 0;

	/**
	 * Pushes the agent's progress (0-100) to the WPS framework and to the workspace
	 * provenance record. Also implements a cancellation heuristic: if the stored
	 * response for this request shrinks between polls, the computation is assumed
	 * cancelled and the agent is shut down (status forced to -1).
	 * NOTE(review): the "shrinking response means cancellation" assumption should be
	 * confirmed against the 52N response-database behaviour.
	 */
	public void updateStatus(float status) {
		if (agent != null) {
			long stream = org.n52.wps.server.database.DatabaseFactory.getDatabase().getContentLengthForStoreResponse(wpsExternalID);
			//AnalysisLogger.getLogger().debug("STATUS bytes " + stream + " interrupt bytes " + statusInterrupt);
			if (statusInterrupt == 0 || statusInterrupt > stream - 3) {
				statusInterrupt = stream;
			} else {
				AnalysisLogger.getLogger().debug("STATUS INTERRUPTED!");
				agent.shutdown();
				statusInterrupt = -1;
				agent = null;
				status = -1f;
				System.gc();
			}
			// only propagate real changes (including the -1 interruption marker)
			if (status!=previousStatus){
				AnalysisLogger.getLogger().debug("STATUS update to:" + status);
				previousStatus=status;
				super.update(new Integer((int) status));
				updateComputationOnWS(status, null);
			}
		}
	}

	/**
	 * Writes the current status (and optional exception text) of the computation to
	 * the workspace via DataspaceManager. Failures are logged and swallowed so that
	 * provenance problems never abort the computation itself.
	 */
	public void updateComputationOnWS(float status, String exception) {
		if (currentComputation != null) {
			currentComputation.setStatus(""+status);
			if (exception!=null && exception.length()>0)
				currentComputation.setException(exception);
			DataspaceManager manager = new DataspaceManager(config, currentComputation, null, null, null);
			try {
				manager.writeRunningComputationData();
			} catch (Exception ez) {
				AnalysisLogger.getLogger().debug("Dataspace->Status updater->Impossible to write computation information on the Workspace");
				AnalysisLogger.getLogger().debug(ez);
			}
		}
	}

	/**
	 * Main WPS entry point. Orchestrates a full computation:
	 * configuration and token resolution, throttling, infrastructure/database lookup,
	 * input translation, agent execution with a background status updater, output
	 * merging, provenance publication, and (in the finally block) cleanup of
	 * temporary tables and resources. On failure the error is written to the
	 * workspace (status -2) and rethrown to the framework.
	 */
	@Execute
	public void run() throws Exception {
		String algorithm = "";
		List<String> generatedInputTables = null;
		List<String> generatedOutputTables = null;
		List<File> generatedFiles = null;
		// NOTE(review): 'date' appears unused in this method.
		String date = new java.text.SimpleDateFormat("dd_MM_yyyy_HH:mm:ss").format(System.currentTimeMillis());
		// the session id is stable when the external WPS id is known, random otherwise
		String computationSession = this.getAlgorithmClass().getSimpleName() + "_ID_" + UUID.randomUUID().toString();
		if (wpsExternalID != null) {
			AnalysisLogger.getLogger().info("Using wps External ID " + wpsExternalID);
			computationSession = this.getAlgorithmClass().getSimpleName() + "_ID_" + wpsExternalID;
		} else
			AnalysisLogger.getLogger().info("Wps External ID not set");

		try {

			// wait for server resources to be available
			startTime = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis());
			time("WPS Algorithm objects Initialization: Session " + computationSession);
			// silence logback for the whole computation
			ch.qos.logback.classic.Logger root = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME);
			root.setLevel(ch.qos.logback.classic.Level.OFF);

			// set the configuration environment for this algorithm
			ConfigurationManager configManager = new ConfigurationManager(); // initializes parameters from file
			manageUserToken();
			configManager.configAlgorithmEnvironment(inputs);
			configManager.setComputationId(computationSession);
			config = configManager.getConfig();
			AnalysisLogger.getLogger().info("Configured algorithm with session " + computationSession);
			time("Configuration");
			// throttle: block until a computation slot is free
			waitForResources();
			AnalysisLogger.getLogger().info("Running algorithm with session " + computationSession);
			time("Waiting time for resources to be free");
			// add the computation to the global list of computations
			addComputation(computationSession, configManager.getUsername() + ":" + configManager.getScope());

			String scope = configManager.getScope();
			String username = configManager.getUsername();

			AnalysisLogger.getLogger().info("1 - Algorithm environment initialized in scope " + scope + " with user name " + username + " and session " + computationSession);
			AnalysisLogger.getLogger().info("Max allowed computations " + ConfigurationManager.getMaxComputations() + " using storage " + ConfigurationManager.useStorage());
			// init the infrastructure dialoguer
			AnalysisLogger.getLogger().info("2 - Initializing connection to the e-Infrastructure");
			infrastructureDialoguer = new InfrastructureDialoguer(scope);
			time("Connection to the e-Infrastructure initialized");
			// set the database parameters (per-scope cache avoids repeated IS queries)
			AnalysisLogger.getLogger().info("3 - Initializing connection to the e-Infrastructure central database for computations");
			DatabaseInfo supportDatabaseInfo = getDatabaseInfo(scope);
			if (supportDatabaseInfo == null) {
				supportDatabaseInfo = infrastructureDialoguer.getDatabaseInfo("StatisticalManagerDataBase");
				addDatabaseInfo(scope, supportDatabaseInfo);
			} else
				AnalysisLogger.getLogger().info("Using cached database information: " + supportDatabaseInfo);
			AnalysisLogger.getLogger().info("Retrieved Central Database: " + supportDatabaseInfo);
			InputsManager inputsManager = new InputsManager(inputs, config, computationSession);
			inputsManager.configSupportDatabaseParameters(supportDatabaseInfo);
			time("Central database information retrieval");
			// retrieve the algorithm to execute (the generated subclass' simple name)
			AnalysisLogger.getLogger().info("4 - Retrieving WPS algorithm name");
			algorithm = this.getAlgorithmClass().getSimpleName();
			AnalysisLogger.getLogger().debug("Selected Algorithm: " + algorithm);
			config.setAgent(algorithm);
			config.setModel(algorithm);
			time("Ecological Engine Algorithm selection");
			// adding service parameters to the configuration
			AnalysisLogger.getLogger().info("5 - Adding Service parameters to the configuration");
			inputsManager.addInputServiceParameters(getInputParameters(algorithm), infrastructureDialoguer);
			time("Service parameters added to the algorithm");
			// merging wps with ecological engine parameters - modifies the
			// config
			AnalysisLogger.getLogger().info("6 - Translating WPS Inputs into Ecological Engine Inputs");
			// build computation Data
			currentComputation = new ComputationData(config.getTaskID(), config.getAgent(), "", "", startTime, "-", "0", config.getTaskID(), configManager.getUsername());
			inputsManager.mergeWpsAndEcologicalInputs(supportDatabaseInfo);
			generatedInputTables = inputsManager.getGeneratedTables();
			generatedFiles = inputsManager.getGeneratedInputFiles();
			time("Setup and download of input parameters with tables creation");
			// retrieve the computational agent given the configuration
			AnalysisLogger.getLogger().info("7 - Retrieving Ecological Engine algorithm");
			agent = getComputationalAgent(algorithm);
			currentComputation.setOperatorDescription(agent.getDescription());
			currentComputation.setInfrastructure(agent.getInfrastructure().name());
			AnalysisLogger.getLogger().debug("Found Ecological Engine Algorithm: " + agent);
			time("Algorithm initialization");
			// take the a priori declared wps output
			AnalysisLogger.getLogger().info("8 - Retrieving the a priori output of the algorithm");
			StatisticalType prioroutput = null;
			try {
				prioroutput = getOutput(algorithm);
			} catch (Exception e) {
				AnalysisLogger.getLogger().info("Warning: No a priori output for algorithm " + algorithm);
			}
			time("A priori output retrieval");
			// run the computation (status updater polls the agent in a daemon thread)
			AnalysisLogger.getLogger().info("9 - Running the computation and updater");
			runStatusUpdater();
			agent.init();
			agent.compute();
			AnalysisLogger.getLogger().info("The computation has finished. Retrieving output");
			time("Execution time");
			// get the a posteriori output
			AnalysisLogger.getLogger().info("10 - Retrieving the a posteriori output of the algorithm");
			StatisticalType postoutput = agent.getOutput();
			AnalysisLogger.getLogger().debug("Computation Output: " + postoutput);
			time("Output retrieval");
			// merge the posterior and prior outputs
			AnalysisLogger.getLogger().info("11 - Merging the a priori and a posteriori output");
			OutputsManager outputmanager = new OutputsManager(config, computationSession);
			outputs = outputmanager.createOutput(prioroutput, postoutput);
			// in the case of storage usage, delete all local files
			generatedOutputTables = outputmanager.getGeneratedTables();
			if (ConfigurationManager.useStorage()) {
				generatedFiles.addAll(outputmanager.getGeneratedFiles());
				time("Output preparation for WPS document (using storage)");
			} else
				time("Output preparation for WPS document (no storage manager)");

			outputmanager.shutdown();

			// delete all temporary tables
			AnalysisLogger.getLogger().info("12 - Deleting possible generated temporary tables");
			AnalysisLogger.getLogger().debug("Final Computation Output: " + outputs);

			// NOTE(review): duplicated step-12 log line, kept as in the original
			AnalysisLogger.getLogger().info("12 - Deleting possible generated temporary tables");

			AnalysisLogger.getLogger().debug("All done");
			endTime = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis());

			// publish provenance (asynchronously, via DataspaceManager thread)
			saveComputationOnWS(inputsManager.getProvenanceData(), outputmanager.getProvenanceData(), agent, generatedFiles);

		} catch (Exception e) {
			AnalysisLogger.getLogger().debug("Error in Algorithm execution: " + algorithm);
			AnalysisLogger.getLogger().debug(e);
			e.printStackTrace();
			// mark the computation as failed (-2) on the workspace, then rethrow
			updateComputationOnWS(-2,e.getMessage());
			throw e;
		} finally {
			AnalysisLogger.getLogger().debug("Deleting Input Tables");
			deleteTemporaryTables(generatedInputTables);
			AnalysisLogger.getLogger().debug("Deleting Output Tables");
			deleteTemporaryTables(generatedOutputTables);
			// AnalysisLogger.getLogger().debug("Deleting Files");
			// deleteGeneratedFiles(generatedFiles);
			// remove this computation from the list
			removeComputation(computationSession);
			// cleanResources();
			time("Cleaning of resources");
			displayTimes();
			cleanResources();
		}

	}

	/**
	 * Background task that polls the agent every 10 s and forwards its progress to
	 * updateStatus() until the agent reaches 100% or is nulled (interruption/cleanup).
	 * NOTE: non-static inner class — it intentionally reads the enclosing instance's
	 * 'agent' field.
	 */
	public class StatusUpdater implements Runnable {

		@Override
		public void run() {
			while (agent != null && agent.getStatus() < 100) {
				try {
					updateStatus(agent.getStatus());
					Thread.sleep(10000);
				} catch (InterruptedException e) {
					// TODO Auto-generated catch block
					e.printStackTrace();
				}
			}
			AnalysisLogger.getLogger().info("Status updater terminated");
		}
	}

	/** Starts the StatusUpdater polling thread for the current computation. */
	private void runStatusUpdater() {
		StatusUpdater updater = new StatusUpdater();

		Thread t = new Thread(updater);
		t.start();
		AnalysisLogger.getLogger().debug("Provenance manager running");
	}

	/**
	 * Publishes the finished computation (status 100) with its input/output
	 * provenance and generated files to the workspace, asynchronously via a
	 * DataspaceManager thread. The 'agent' and 'generatedFiles' parameters
	 * intentionally shadow the fields: the caller passes the values captured
	 * before cleanup.
	 */
	private void saveComputationOnWS(List<StoredData> inputData, List<StoredData> outputData, ComputationalAgent agent, List<File> generatedFiles) {
		AnalysisLogger.getLogger().debug("Provenance manager started");
		ComputationData computation = new ComputationData(config.getTaskID(), config.getAgent(), agent.getDescription(), agent.getInfrastructure().name(), startTime, endTime, "100", config.getTaskID(),config.getParam(ConfigurationManager.serviceUserNameParameterVariable));
		// post on WS
		DataspaceManager manager = new DataspaceManager(config, computation, inputData, outputData, generatedFiles);
		Thread t = new Thread(manager);
		t.start();
		AnalysisLogger.getLogger().debug("Provenance manager running");
	}

	/** Records the completion timestamp of a phase under the given label. */
	private void time(String label) {
		times.put(label, System.currentTimeMillis());
	}

	/**
	 * Logs a summary table of the recorded phases: for each label, the elapsed time
	 * since the previous phase and its absolute timestamp, plus a final total.
	 */
	private void displayTimes() {
		AnalysisLogger.getLogger().debug("Times Summary:");
		AnalysisLogger.getLogger().debug("Label;Elapsed(ms);Time");
		long prevtime = 0;
		long inittime = 0;
		for (String label : times.keySet()) {
			long currentTime = times.get(label);
			if (prevtime == 0) {
				prevtime = currentTime;
				inittime = currentTime;
			}
			AnalysisLogger.getLogger().debug(label + ";" + (currentTime - prevtime) + ";" + new Date(currentTime));
			prevtime = currentTime;
		}
		AnalysisLogger.getLogger().debug("Total Elapsed;" + (prevtime - inittime) + ";" + new Date(prevtime));

	}

	/** Releases per-computation state and hints a GC; called from run()'s finally. */
	private void cleanResources() {
		times = null;
		agent = null;
		System.gc();
	}

}
|
|
@ -1,138 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.InputStream;
|
||||
import java.net.Inet4Address;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Properties;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.n52.wps.commons.WPSConfig;
|
||||
|
||||
public class ConfigurationManager {
|
||||
|
||||
public static String serviceUserNameParameterVariable = "ServiceUserName";
|
||||
public static String processingSessionVariable = "Session";
|
||||
public static String webpathVariable = "WebPath";
|
||||
public static String webPersistencePathVariable = "";
|
||||
public static String usernameParameter = "user.name";
|
||||
public static String scopeParameter = "scope";
|
||||
public static String defaultScope= "/gcube/devsec";
|
||||
public static String defaultUsername= "statistical.wps";
|
||||
|
||||
private static Integer maxComputations = null;
|
||||
private static Boolean useStorage = null;
|
||||
static boolean simulationMode = false;
|
||||
|
||||
public static synchronized Integer getMaxComputations(){
|
||||
return maxComputations;
|
||||
}
|
||||
|
||||
public static synchronized Boolean useStorage(){
|
||||
return useStorage;
|
||||
}
|
||||
|
||||
public static synchronized Boolean isSimulationMode(){
|
||||
return simulationMode;
|
||||
}
|
||||
|
||||
public void getInitializationProperties() {
|
||||
try {
|
||||
if (maxComputations == null) {
|
||||
Properties options = new Properties();
|
||||
InputStream is = this.getClass().getClassLoader().getResourceAsStream("templates/setup.cfg");
|
||||
options.load(is);
|
||||
is.close();
|
||||
maxComputations = Integer.parseInt(options.getProperty("maxcomputations"));
|
||||
useStorage = Boolean.parseBoolean(options.getProperty("saveond4sstorage"));
|
||||
simulationMode=Boolean.parseBoolean(options.getProperty("simulationMode"));
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
private AlgorithmConfiguration config;
|
||||
private String scope;
|
||||
private String username;
|
||||
|
||||
public String getScope() {
|
||||
return scope;
|
||||
}
|
||||
|
||||
public String getUsername() {
|
||||
return username;
|
||||
}
|
||||
|
||||
public ConfigurationManager() {
|
||||
getInitializationProperties();
|
||||
}
|
||||
|
||||
public AlgorithmConfiguration getConfig() {
|
||||
return config;
|
||||
}
|
||||
|
||||
public void setComputationId(String computationId){
|
||||
config.setTaskID(computationId);
|
||||
}
|
||||
|
||||
public void configAlgorithmEnvironment(LinkedHashMap<String, Object> inputs) throws Exception {
|
||||
// set config container
|
||||
config = new AlgorithmConfiguration();
|
||||
String webperspath = WPSConfig.getConfigDir() + "../persistence/";
|
||||
// selecting persistence path
|
||||
// String persistencePath = File.createTempFile("wpsstatcheck", ".sm").getParent() + "/../cfg/";
|
||||
String persistencePath = WPSConfig.getConfigDir() + "../ecocfg/";
|
||||
String configPath = persistencePath;
|
||||
if (!new File(configPath).isDirectory()) {
|
||||
configPath = "./cfg/";
|
||||
persistencePath = "./";
|
||||
}
|
||||
System.out.println("Taking configuration from " + (new File(configPath).getAbsolutePath()) + " and persistence in " + persistencePath);
|
||||
// setting configuration and logger
|
||||
config.setPersistencePath(persistencePath);
|
||||
config.setConfigPath(configPath);
|
||||
config.setNumberOfResources(1);
|
||||
AnalysisLogger.setLogger(configPath + "/" + AlgorithmConfiguration.defaultLoggerFile);
|
||||
AnalysisLogger.getLogger().debug("Taking configuration from " + configPath + " and persistence in " + persistencePath);
|
||||
// setting application paths
|
||||
String webapp = WPSConfig.getInstance().getWPSConfig().getServer().getWebappPath();
|
||||
if (webapp == null)
|
||||
webapp = "wps";
|
||||
String host = WPSConfig.getInstance().getWPSConfig().getServer().getHostname();
|
||||
if (host.toLowerCase().equals("localhost"))
|
||||
host = Inet4Address.getLocalHost().getHostAddress();
|
||||
String port = WPSConfig.getInstance().getWPSConfig().getServer().getHostport();
|
||||
AnalysisLogger.getLogger().debug("Host: " + host + " Port: " + port + " Webapp: " + webapp + " ");
|
||||
AnalysisLogger.getLogger().debug("Web persistence path: " + webperspath);
|
||||
|
||||
String webPath = "http://" + host + ":" + port + "/" + webapp + "/persistence/";
|
||||
|
||||
// AnalysisLogger.getLogger().debug("Env Vars: \n"+System.getenv());
|
||||
AnalysisLogger.getLogger().debug("Web app path: " + webPath);
|
||||
|
||||
// retrieving scope
|
||||
scope = (String) inputs.get(scopeParameter);
|
||||
AnalysisLogger.getLogger().debug("Retrieved scope: " + scope);
|
||||
if (scope == null)
|
||||
throw new Exception("Error: scope parameter (scope) not set! This violates e-Infrastructure security policies");
|
||||
if (!scope.startsWith("/"))
|
||||
scope = "/" + scope;
|
||||
|
||||
username = (String) inputs.get(usernameParameter);
|
||||
AnalysisLogger.getLogger().debug("User name used by the client: " + username);
|
||||
if (username == null || username.trim().length() == 0)
|
||||
throw new Exception("Error: user name parameter (user.name) not set! This violates e-Infrastructure security policies");
|
||||
|
||||
config.setGcubeScope(scope);
|
||||
// DONE get username from request
|
||||
config.setParam(serviceUserNameParameterVariable, username);
|
||||
config.setParam(processingSessionVariable, "" + UUID.randomUUID());
|
||||
config.setParam(webpathVariable, webPath);
|
||||
config.setParam(webPersistencePathVariable, webperspath);
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -1,76 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping;
|
||||
|
||||
import java.net.URLEncoder;
|
||||
|
||||
/**
 * Value object describing one WPS input/output item: its name and
 * description, allowed and default values, payload content (possibly a URL),
 * MIME type, the binding class name, and an optional local-machine copy of
 * the payload.
 */
public class IOWPSInformation {

	private String name;
	private String abstractStr;          // human-readable description
	private String allowed;              // allowed values, if constrained
	private String defaultVal;
	private String localMachineContent;  // local file path of the payload, if any
	private String content;              // payload value or URL
	private String mimetype;
	private String classname;            // WPS binding class name

	/** Returns the WPS binding class name. */
	public String getClassname() {
		return classname;
	}

	public void setClassname(String classname) {
		this.classname = classname;
	}

	/** Returns the item name. */
	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	/** Returns the human-readable description. */
	public String getAbstractStr() {
		return abstractStr;
	}

	public void setAbstractStr(String abstractStr) {
		this.abstractStr = abstractStr;
	}

	/** Returns the allowed values, if any. */
	public String getAllowed() {
		return allowed;
	}

	public void setAllowed(String allowed) {
		this.allowed = allowed;
	}

	/** Returns the default value, if any. */
	public String getDefaultVal() {
		return defaultVal;
	}

	public void setDefaultVal(String defaultVal) {
		this.defaultVal = defaultVal;
	}

	/** Returns the payload content (a plain value or a URL). */
	public String getContent() {
		return content;
	}

	/**
	 * Stores the payload content. URLs must not contain raw blanks, so any
	 * space in an http(s) link is percent-encoded before storing.
	 */
	public void setContent(String content) {
		if (content!=null && content.startsWith("http"))
			content = content.replace(" ", "%20");
		this.content = content;
	}

	/** Returns the MIME type of the payload. */
	public String getMimetype() {
		return mimetype;
	}

	public void setMimetype(String mimetype) {
		this.mimetype = mimetype;
	}

	/** Returns the local file path of the payload, or null when remote-only. */
	public String getLocalMachineContent() {
		return localMachineContent;
	}

	public void setLocalMachineContent(String localMachineContent) {
		this.localMachineContent = localMachineContent;
	}

}
|
|
@ -1,424 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.URL;
|
||||
import java.net.URLDecoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.contentmanagement.blobstorage.service.IClient;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.MemoryType;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.TablesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.DatabaseInfo;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.infrastructure.InfrastructureDialoguer;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.DataProvenance;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.StoredData;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.StoredType;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.GML2CSV;
|
||||
import org.hibernate.SessionFactory;
|
||||
import org.n52.wps.io.data.GenericFileData;
|
||||
|
||||
public class InputsManager {
|
||||
LinkedHashMap<String, Object> inputs;
|
||||
List<String> generatedTables;
|
||||
List<File> generatedFiles;
|
||||
HashMap<String, String> inputTableTemplates = new HashMap<String, String>();
|
||||
AlgorithmConfiguration config;
|
||||
String computationId;
|
||||
|
||||
List<StoredData> provenanceData = new ArrayList<StoredData>();
|
||||
public List<StoredData> getProvenanceData() {
|
||||
return provenanceData;
|
||||
}
|
||||
|
||||
|
||||
public static String inputsSeparator = "\\|";
|
||||
|
||||
public AlgorithmConfiguration getConfig() {
|
||||
return config;
|
||||
}
|
||||
|
||||
public InputsManager(LinkedHashMap<String, Object> inputs, AlgorithmConfiguration config, String computationId) {
|
||||
this.inputs = inputs;
|
||||
this.config = config;
|
||||
this.computationId = computationId;
|
||||
generatedTables = new ArrayList<String>();
|
||||
generatedFiles = new ArrayList<File>();
|
||||
}
|
||||
|
||||
public List<String> getGeneratedTables() {
|
||||
return generatedTables;
|
||||
}
|
||||
|
||||
public List<File> getGeneratedInputFiles() {
|
||||
return generatedFiles;
|
||||
}
|
||||
|
||||
public void configSupportDatabaseParameters(DatabaseInfo supportDatabaseInfo) throws Exception {
|
||||
// retrieving database parameters
|
||||
config.setDatabaseDialect(supportDatabaseInfo.dialect);
|
||||
config.setDatabaseDriver(supportDatabaseInfo.driver);
|
||||
config.setDatabasePassword(supportDatabaseInfo.password);
|
||||
config.setDatabaseURL(supportDatabaseInfo.url);
|
||||
config.setDatabaseUserName(supportDatabaseInfo.username);
|
||||
// assigning database variables
|
||||
config.setParam("DatabaseDriver", supportDatabaseInfo.driver);
|
||||
config.setParam("DatabaseUserName", supportDatabaseInfo.username);
|
||||
config.setParam("DatabasePassword", supportDatabaseInfo.password);
|
||||
config.setParam("DatabaseURL", supportDatabaseInfo.url);
|
||||
}
|
||||
|
||||
public void mergeWpsAndEcologicalInputs(DatabaseInfo supportDatabaseInfo) throws Exception {
|
||||
// browse input parameters from WPS
|
||||
for (String inputName : inputs.keySet()) {
|
||||
Object input = inputs.get(inputName);
|
||||
AnalysisLogger.getLogger().debug("Managing Input Parameter with Name "+ inputName);
|
||||
// case of simple input
|
||||
if (input instanceof String) {
|
||||
AnalysisLogger.getLogger().debug("Simple Input: "+ input);
|
||||
// manage lists
|
||||
String inputAlgo = ((String) input).trim().replaceAll(inputsSeparator, AlgorithmConfiguration.listSeparator);
|
||||
AnalysisLogger.getLogger().debug("Simple Input Transformed: " + inputAlgo);
|
||||
config.setParam(inputName, inputAlgo);
|
||||
|
||||
saveInputData(inputName,inputName,inputAlgo);
|
||||
}
|
||||
// case of Complex Input
|
||||
else if (input instanceof GenericFileData) {
|
||||
|
||||
AnalysisLogger.getLogger().debug("Complex Input: " + input);
|
||||
// retrieve payload
|
||||
GenericFileData files = ((GenericFileData) input);
|
||||
List<File> localfiles = getLocalFiles(files);
|
||||
String inputtables = "";
|
||||
int nfiles = localfiles.size();
|
||||
for (int i = 0; i < nfiles; i++) {
|
||||
File tableFile = localfiles.get(i);
|
||||
generatedFiles.add(tableFile);
|
||||
|
||||
String tableName = ("wps_" + ("" + UUID.randomUUID()).replace("_", "")).replace("-", "");
|
||||
// create the table
|
||||
|
||||
if (inputTableTemplates.get(inputName) != null) {
|
||||
AnalysisLogger.getLogger().debug("Creating table: " + tableName);
|
||||
createTable(tableName, tableFile, config, supportDatabaseInfo, inputTableTemplates.get(inputName));
|
||||
generatedTables.add(tableName);
|
||||
}
|
||||
//case of non-table input file, e.g. FFANN
|
||||
else
|
||||
tableName = tableFile.getAbsolutePath();
|
||||
if (i > 0)
|
||||
inputtables = inputtables + AlgorithmConfiguration.getListSeparator();
|
||||
|
||||
inputtables += tableName;
|
||||
|
||||
saveInputData(inputName, inputName, tableFile.getAbsolutePath());
|
||||
}
|
||||
// the only possible complex input is a table - check the WPS
|
||||
// parsers
|
||||
config.setParam(inputName, inputtables);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public boolean isXML(String fileContent){
|
||||
|
||||
if (fileContent.startsWith("<"))
|
||||
return true;
|
||||
else
|
||||
return false;
|
||||
}
|
||||
|
||||
public String readOneLine(String filename){
|
||||
|
||||
try {
|
||||
BufferedReader in = new BufferedReader(new FileReader(new File(filename)));
|
||||
String line = null;
|
||||
String vud = "";
|
||||
|
||||
while ((line = in.readLine()) != null) {
|
||||
if (line.trim().length()>0){
|
||||
vud = line.trim();
|
||||
break;
|
||||
}
|
||||
}
|
||||
in.close();
|
||||
return vud;
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
public List<File> getLocalFiles(GenericFileData files) throws Exception {
|
||||
|
||||
// download input
|
||||
List<File> filesList = new ArrayList<File>();
|
||||
File f = files.getBaseFile(false);
|
||||
AnalysisLogger.getLogger().debug("Retrieving file content as a URL link: " + f.getAbsolutePath());
|
||||
//TODO DO NOT READ FILE INTO MEMORY
|
||||
// read file content
|
||||
String fileLink = readOneLine(f.getAbsolutePath());
|
||||
AnalysisLogger.getLogger().debug("File link: " + fileLink.substring(0,Math.min(fileLink.length(),10)) + "...");
|
||||
String fileName = "";
|
||||
// case of a http link
|
||||
if (fileLink.toLowerCase().startsWith("http:") || fileLink.toLowerCase().startsWith("https:")) {
|
||||
// manage the case of multiple files
|
||||
String[] remotefiles = fileLink.split(inputsSeparator);
|
||||
for (String subfilelink : remotefiles) {
|
||||
subfilelink = subfilelink.trim();
|
||||
AnalysisLogger.getLogger().debug("Managing link: " + subfilelink);
|
||||
if (subfilelink.length() == 0)
|
||||
continue;
|
||||
InputStream is = null;
|
||||
HttpURLConnection urlConnection = null;
|
||||
URL url = new URL(subfilelink);
|
||||
urlConnection = (HttpURLConnection) url.openConnection();
|
||||
is = new BufferedInputStream(urlConnection.getInputStream());
|
||||
// retrieve payload: for test purpose only
|
||||
fileName = subfilelink.substring(subfilelink.lastIndexOf("/") + 1).trim();
|
||||
if (fileName.contains("."))
|
||||
fileName = fileName.substring(0, fileName.lastIndexOf(".")) + UUID.randomUUID() + fileName.substring(fileName.lastIndexOf("."));
|
||||
else
|
||||
fileName = fileName + UUID.randomUUID();
|
||||
|
||||
AnalysisLogger.getLogger().debug("Retrieving remote input in file: " + fileName);
|
||||
AnalysisLogger.getLogger().debug("Creating local temp file: " + fileName);
|
||||
File of = new File(config.getPersistencePath(), fileName);
|
||||
FileOutputStream fos = new FileOutputStream(of);
|
||||
IOUtils.copy(is, fos);
|
||||
is.close();
|
||||
fos.close();
|
||||
urlConnection.disconnect();
|
||||
filesList.add(of);
|
||||
AnalysisLogger.getLogger().debug("Created local file: " + of.getAbsolutePath());
|
||||
}
|
||||
} else {
|
||||
AnalysisLogger.getLogger().debug("Complex Input payload is the filelink");
|
||||
fileName = f.getName();
|
||||
AnalysisLogger.getLogger().debug("Retriving local input from file: " + fileName);
|
||||
|
||||
//since this is a GenericFile we will suppose it is a csv file
|
||||
if (isXML(fileLink))
|
||||
{
|
||||
String xmlFile = f.getAbsolutePath();
|
||||
String csvFile = xmlFile+".csv";
|
||||
AnalysisLogger.getLogger().debug("Transforming XML file into a csv: " + csvFile);
|
||||
GML2CSV.parseGML(xmlFile, csvFile);
|
||||
AnalysisLogger.getLogger().debug("GML Parsed: " + readOneLine(csvFile)+"[..]");
|
||||
f = new File(csvFile);
|
||||
}
|
||||
else
|
||||
AnalysisLogger.getLogger().debug("The file is a csv: " + f.getAbsolutePath());
|
||||
filesList.add(f);
|
||||
|
||||
}
|
||||
|
||||
return filesList;
|
||||
}
|
||||
|
||||
public void createTable(String tableName, File tableFile, AlgorithmConfiguration config, DatabaseInfo supportDatabaseInfo, String inputTableTemplate) throws Exception {
|
||||
|
||||
// creating table
|
||||
AnalysisLogger.getLogger().debug("Complex Input size after download: " + tableFile.length());
|
||||
if (tableFile.length() == 0)
|
||||
throw new Exception("Error: the Input file is empty");
|
||||
|
||||
AnalysisLogger.getLogger().debug("Creating table from file: " + tableFile.getAbsolutePath());
|
||||
|
||||
SessionFactory dbConnection = null;
|
||||
try {
|
||||
dbConnection = DatabaseUtils.initDBSession(config);
|
||||
BufferedReader br = new BufferedReader(new FileReader(tableFile));
|
||||
String header = br.readLine();
|
||||
br.close();
|
||||
|
||||
AnalysisLogger.getLogger().debug("File header: " + header);
|
||||
String templatename = inputTableTemplate;
|
||||
AnalysisLogger.getLogger().debug("Suggested Template: " + templatename);
|
||||
String tableStructure = suggestTableStructure(header, templatename);
|
||||
AnalysisLogger.getLogger().debug("Extracted table structure: " + tableStructure);
|
||||
if (tableStructure == null)
|
||||
throw new Exception("Input table is not compliant to the required structure");
|
||||
TableTemplatesMapper mapper = new TableTemplatesMapper();
|
||||
String createstatement = mapper.generateCreateStatement(tableName, templatename, tableStructure);
|
||||
AnalysisLogger.getLogger().debug("Creating table: " + tableName);
|
||||
DatabaseUtils.createBigTable(true, tableName, supportDatabaseInfo.driver, supportDatabaseInfo.username, supportDatabaseInfo.password, supportDatabaseInfo.url, createstatement, dbConnection);
|
||||
DatabaseUtils.createRemoteTableFromFile(tableFile.getAbsolutePath(), tableName, ",", true, supportDatabaseInfo.username, supportDatabaseInfo.password, supportDatabaseInfo.url);
|
||||
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug("Error in database transaction " + e.getLocalizedMessage());
|
||||
throw new Exception("Error in creating the table for " + tableName + ": " + e.getLocalizedMessage());
|
||||
} finally {
|
||||
DatabaseUtils.closeDBConnection(dbConnection);
|
||||
}
|
||||
}
|
||||
|
||||
public String suggestTableStructure(String header, String templatename) {
|
||||
TableTemplatesMapper mapper = new TableTemplatesMapper();
|
||||
String variablesString = mapper.varsMap.get(templatename);
|
||||
String[] headersVector = header.split(",");
|
||||
String[] variables = variablesString.split(",");
|
||||
boolean check = true;
|
||||
HashMap<String, String> definitionsMap = new HashMap<String, String>();
|
||||
for (String var : variables) {
|
||||
var = var.trim();
|
||||
if (var.contains("<")) {
|
||||
check = false;
|
||||
}
|
||||
|
||||
if (check) {
|
||||
String varname = var.substring(0, var.indexOf(" "));
|
||||
boolean found = false;
|
||||
for (String headvar : headersVector) {
|
||||
if (headvar.trim().equalsIgnoreCase(varname)) {
|
||||
definitionsMap.put(headvar.trim(), var);
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!found)
|
||||
return null;
|
||||
}
|
||||
if (var.contains(">")) {
|
||||
check = true;
|
||||
}
|
||||
}
|
||||
|
||||
StringBuffer structure = new StringBuffer();
|
||||
int counter = 0;
|
||||
for (String headvar : headersVector) {
|
||||
String def = definitionsMap.get(headvar);
|
||||
if (def == null)
|
||||
structure.append(headvar + " character varying");
|
||||
else
|
||||
structure.append(def);
|
||||
if (counter < headersVector.length - 1)
|
||||
structure.append(", ");
|
||||
|
||||
counter++;
|
||||
}
|
||||
|
||||
return structure.toString();
|
||||
}
|
||||
|
||||
public void addInputServiceParameters(List<StatisticalType> agentInputs, InfrastructureDialoguer infrastructureDialoguer) throws Exception {
|
||||
|
||||
// check and fullfil additional parameters
|
||||
DatabaseInfo dbinfo = null;
|
||||
inputTableTemplates = new HashMap<String, String>();
|
||||
|
||||
for (StatisticalType type : agentInputs) {
|
||||
if (type instanceof PrimitiveType) {
|
||||
if (((PrimitiveType) type).getType()==PrimitiveTypes.CONSTANT){
|
||||
String constant = ""+((PrimitiveType) type).getDefaultValue();
|
||||
config.setParam(type.getName(), constant);
|
||||
AnalysisLogger.getLogger().debug("Constant parameter: "+constant);
|
||||
}
|
||||
}
|
||||
if (type instanceof ServiceType) {
|
||||
ServiceType stype = (ServiceType) type;
|
||||
AnalysisLogger.getLogger().debug("Found ServiceType Input: " + stype);
|
||||
String name = stype.getName();
|
||||
AnalysisLogger.getLogger().debug("ServiceType Input Name: " + name);
|
||||
ServiceParameters sp = stype.getServiceParameter();
|
||||
AnalysisLogger.getLogger().debug("ServiceType Parameter: " + sp);
|
||||
String value = "";
|
||||
if (sp == ServiceParameters.RANDOMSTRING)
|
||||
value = "stat" + UUID.randomUUID().toString().replace("-", "");
|
||||
else if (sp == ServiceParameters.USERNAME){
|
||||
value = (String) inputs.get(ConfigurationManager.usernameParameter);
|
||||
|
||||
AnalysisLogger.getLogger().debug("User name used by the client: "+value);
|
||||
}
|
||||
AnalysisLogger.getLogger().debug("ServiceType Adding: (" + name + "," + value + ")");
|
||||
config.setParam(name, value);
|
||||
} else if (type instanceof DatabaseType) {
|
||||
DatabaseType dtype = (DatabaseType) type;
|
||||
String name = dtype.getName();
|
||||
AnalysisLogger.getLogger().debug("Found DatabaseType Input: " + dtype + " with name " + name);
|
||||
DatabaseParameters parameter = dtype.getDatabaseParameter();
|
||||
AnalysisLogger.getLogger().debug("DatabaseType Input Parameter: " + parameter);
|
||||
if (parameter == DatabaseParameters.REMOTEDATABASERRNAME) {
|
||||
dbinfo = infrastructureDialoguer.getDatabaseInfo(name);
|
||||
AnalysisLogger.getLogger().debug("Requesting remote database name: " + name);
|
||||
} else if (parameter == DatabaseParameters.REMOTEDATABASEDIALECT) {
|
||||
config.setParam(name, dbinfo.dialect);
|
||||
AnalysisLogger.getLogger().debug("Extracted db dialect: " + dbinfo.dialect);
|
||||
} else if (parameter == DatabaseParameters.REMOTEDATABASEDRIVER) {
|
||||
config.setParam(name, dbinfo.driver);
|
||||
AnalysisLogger.getLogger().debug("Extracted db driver: " + dbinfo.driver);
|
||||
} else if (parameter == DatabaseParameters.REMOTEDATABASEPASSWORD) {
|
||||
config.setParam(name, dbinfo.password);
|
||||
AnalysisLogger.getLogger().debug("Extracted db password: " + dbinfo.password);
|
||||
} else if (parameter == DatabaseParameters.REMOTEDATABASEUSERNAME) {
|
||||
config.setParam(name, dbinfo.username);
|
||||
AnalysisLogger.getLogger().debug("Extracted db username: " + dbinfo.username);
|
||||
} else if (parameter == DatabaseParameters.REMOTEDATABASEURL) {
|
||||
config.setParam(name, dbinfo.url);
|
||||
AnalysisLogger.getLogger().debug("Extracted db url: " + dbinfo.url);
|
||||
}
|
||||
AnalysisLogger.getLogger().debug("DatabaseType Input Parameter Managed");
|
||||
} else if (type instanceof InputTable) {
|
||||
String name = type.getName();
|
||||
inputTableTemplates.put(name, ((InputTable) type).getTemplateNames().get(0).name());
|
||||
} else if (type instanceof TablesList) {
|
||||
String name = type.getName();
|
||||
inputTableTemplates.put(name, ((TablesList) type).getTemplates().get(0).name());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
private void saveInputData(String name, String description, String payload){
|
||||
String id = name;
|
||||
DataProvenance provenance = DataProvenance.IMPORTED;
|
||||
String creationDate = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis());
|
||||
String operator = config.getAgent();
|
||||
|
||||
StoredType type = StoredType.STRING;
|
||||
|
||||
if (payload != null && (new File (payload).exists())) {
|
||||
type = StoredType.DATA;
|
||||
}
|
||||
|
||||
StoredData data = new StoredData(name, description, id, provenance, creationDate, operator, computationId, type,payload);
|
||||
|
||||
provenanceData.add(data);
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -1,294 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.xmlbeans.XmlObject;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.contentmanagement.blobstorage.service.IClient;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.MemoryType;
|
||||
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.DataProvenance;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.DataspaceManager;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.StoredData;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace.StoredType;
|
||||
|
||||
public class OutputsManager {
|
||||
|
||||
private AlgorithmConfiguration config;
|
||||
|
||||
private List<File> generatedFiles = new ArrayList<File>();
|
||||
private List<String> generatedTables = new ArrayList<String>();
|
||||
private IClient storageclient;
|
||||
private String computationsession;
|
||||
private List<StoredData> provenanceData = new ArrayList<StoredData>();
|
||||
|
||||
public List<StoredData> getProvenanceData() {
|
||||
return provenanceData;
|
||||
}
|
||||
|
||||
public List<File> getGeneratedData() {
|
||||
return generatedFiles;
|
||||
}
|
||||
|
||||
public List<File> getGeneratedFiles() {
|
||||
return generatedFiles;
|
||||
}
|
||||
|
||||
public List<String> getGeneratedTables() {
|
||||
return generatedTables;
|
||||
}
|
||||
|
||||
/**
 * @param config             computation configuration (scope, agent, parameters)
 * @param computationsession identifier of the running computation session,
 *                           used to name remote storage folders and provenance
 */
public OutputsManager(AlgorithmConfiguration config,String computationsession) {
	this.config = config;
	this.computationsession=computationsession;
}
|
||||
|
||||
public LinkedHashMap<String, Object> createOutput(StatisticalType prioroutput, StatisticalType posterioroutput) throws Exception {
|
||||
|
||||
LinkedHashMap<String, Object> outputs = new LinkedHashMap<String, Object>();
|
||||
|
||||
AnalysisLogger.getLogger().debug("Converting prior output into WPS output");
|
||||
StatisticalTypeToWPSType converter = new StatisticalTypeToWPSType();
|
||||
converter.convert2WPSType(prioroutput, false, config);
|
||||
generatedFiles.addAll(converter.getGeneratedFiles());
|
||||
generatedTables.addAll(converter.getGeneratedTables());
|
||||
LinkedHashMap<String, IOWPSInformation> priorOutput = converter.outputSet;
|
||||
|
||||
AnalysisLogger.getLogger().debug("Converting posterior output into WPS output");
|
||||
StatisticalTypeToWPSType postconverter = new StatisticalTypeToWPSType();
|
||||
postconverter.convert2WPSType(posterioroutput, false, config);
|
||||
generatedFiles.addAll(postconverter.getGeneratedFiles());
|
||||
AnalysisLogger.getLogger().debug("Generated Files "+generatedFiles);
|
||||
generatedTables.addAll(postconverter.getGeneratedTables());
|
||||
AnalysisLogger.getLogger().debug("Generated Tables "+generatedFiles);
|
||||
|
||||
LinkedHashMap<String, IOWPSInformation> postOutput = postconverter.outputSet;
|
||||
|
||||
LinkedHashMap<String, IOWPSInformation> ndoutput = new LinkedHashMap<String, IOWPSInformation>();
|
||||
// merging a priori and a posteriori output
|
||||
AnalysisLogger.getLogger().debug("Merging prior and posterior output");
|
||||
if (ConfigurationManager.useStorage())
|
||||
prepareForStoring();
|
||||
|
||||
for (String okey : postOutput.keySet()) {
|
||||
AnalysisLogger.getLogger().debug("Assigning output: " + okey + " to the expected output");
|
||||
IOWPSInformation postInfo = postOutput.get(okey);
|
||||
// search for the best prior matching the output
|
||||
IOWPSInformation info = priorOutput.get(okey);
|
||||
if (info == null) {
|
||||
// if the output was not defined a priori occupy a suitable slot
|
||||
// if not yet occupied
|
||||
for (String priorPName : priorOutput.keySet()) {
|
||||
// check if the slot for this output had been yet occupied
|
||||
if (outputs.get(priorPName) == null && priorPName.startsWith(postInfo.getClassname())) {
|
||||
okey = priorPName;
|
||||
info = priorOutput.get(priorPName);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// this check filters out the containers of sub elements
|
||||
if (postInfo != null && postInfo.getContent() != null) {
|
||||
if (ConfigurationManager.useStorage()) {
|
||||
if (postInfo.getLocalMachineContent() != null) {
|
||||
// return the url from storage manager
|
||||
String storageurl = uploadFileOnStorage(postInfo.getLocalMachineContent(), postInfo.getMimetype());
|
||||
postInfo.setContent(storageurl);
|
||||
}
|
||||
}
|
||||
/*
|
||||
else if (postInfo.getLocalMachineContent() != null) {
|
||||
String url = "<wps:Reference mimeType=\""+postInfo.getMimetype()+"\" xlink:href=\""+postInfo.getContent()+"\" method=\"GET\"/>";
|
||||
AnalysisLogger.getLogger().debug("Reference URL: " + url);
|
||||
outputs.put(okey, url);
|
||||
}
|
||||
else*/
|
||||
if (info != null) {
|
||||
AnalysisLogger.getLogger().debug("Found a corresponding output: " + okey);
|
||||
outputs.put(okey, postInfo.getContent());
|
||||
//add link to the file also among the non deterministic output
|
||||
if (postInfo.getLocalMachineContent() != null) {
|
||||
ndoutput.put(okey, postInfo);
|
||||
}
|
||||
} else {
|
||||
AnalysisLogger.getLogger().debug("Output was not expected: " + okey);
|
||||
ndoutput.put(okey, postInfo);
|
||||
}
|
||||
saveProvenanceData(postInfo);
|
||||
}
|
||||
|
||||
System.gc();
|
||||
}
|
||||
|
||||
XmlObject ndxml = generateNonDeterministicOutput(ndoutput);
|
||||
outputs.put("non_deterministic_output", ndxml);
|
||||
|
||||
//safety check for declared output, i.e. a priori output
|
||||
for (String pkey:priorOutput.keySet()){
|
||||
if (outputs.get(pkey)==null){
|
||||
AnalysisLogger.getLogger().debug("Safety check: adding empty string for " + pkey+ " of type "+priorOutput.get(pkey).getClassname());
|
||||
outputs.put(pkey, "");
|
||||
}
|
||||
}
|
||||
|
||||
return outputs;
|
||||
}
|
||||
|
||||
|
||||
private void saveProvenanceData(IOWPSInformation info){
|
||||
String name = info.getName();
|
||||
String id = info.getName();
|
||||
DataProvenance provenance = DataProvenance.COMPUTED;
|
||||
String creationDate = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis());
|
||||
String operator = config.getAgent();
|
||||
String computationId = computationsession;
|
||||
StoredType type = StoredType.STRING;
|
||||
if (info.getLocalMachineContent() != null) {
|
||||
type = StoredType.DATA;
|
||||
}
|
||||
String payload = info.getContent();
|
||||
|
||||
StoredData data = new StoredData(name, info.getAbstractStr(),id, provenance, creationDate, operator, computationId, type,payload);
|
||||
|
||||
provenanceData.add(data);
|
||||
}
|
||||
|
||||
private void prepareForStoring() {
|
||||
AnalysisLogger.getLogger().debug("Preparing storage client");
|
||||
String scope = config.getGcubeScope();
|
||||
ScopeProvider.instance.set(scope);
|
||||
String serviceClass = "WPS";
|
||||
String serviceName = "wps.synch";
|
||||
String owner = config.getParam(ConfigurationManager.serviceUserNameParameterVariable);
|
||||
storageclient = new StorageClient(serviceClass, serviceName, owner, AccessType.SHARED, MemoryType.VOLATILE).getClient();
|
||||
AnalysisLogger.getLogger().debug("Storage client ready");
|
||||
}
|
||||
|
||||
|
||||
private String uploadFileOnStorage(String localfile, String mimetype) throws Exception {
|
||||
AnalysisLogger.getLogger().debug("Start uploading on storage the following file: " + localfile);
|
||||
File localFile = new File(localfile);
|
||||
String remotef = "/wps_synch_output/" +config.getAgent()+"/"+computationsession+"/"+ localFile.getName();
|
||||
storageclient.put(true).LFile(localfile).RFile(remotef);
|
||||
|
||||
String url = storageclient.getHttpUrl().RFile(remotef);
|
||||
|
||||
/*
|
||||
if (config.getGcubeScope().startsWith("/gcube"))
|
||||
url = "http://data-d.d4science.org/uri-resolver/smp?smp-uri=" + url + "&fileName=" + localFile.getName() + "&contentType=" + mimetype;
|
||||
else
|
||||
url = "http://data.d4science.org/uri-resolver/smp?smp-uri=" + url+ "&fileName=" + localFile.getName() + "&contentType=" + mimetype;
|
||||
*/
|
||||
AnalysisLogger.getLogger().info("Uploading finished - URL: " + url);
|
||||
return url;
|
||||
|
||||
}
|
||||
|
||||
public String cleanTagString(String tag) {
|
||||
return tag.replace(" ", "_").replaceAll("[\\]\\[!\"#$%&'()*+,\\./:;<=>?@\\^`{|}~-]", "");
|
||||
}
|
||||
|
||||
public XmlObject generateNonDeterministicOutputPlain(LinkedHashMap<String, IOWPSInformation> ndoutput) throws Exception {
|
||||
String XMLString = "<gml:featureMember xmlns:gml=\"http://www.opengis.net/gml\" xmlns:d4science=\"http://www.d4science.org\">\n" + " <d4science:output fid=\"outputcollection\">\n";
|
||||
for (String key : ndoutput.keySet()) {
|
||||
IOWPSInformation info = ndoutput.get(key);
|
||||
String payload = info.getContent();
|
||||
String mimetype = info.getMimetype();
|
||||
XMLString += " <d4science:k_" + cleanTagString(key) + ">" + " <d4science:Data><![CDATA[" + payload + "]]></d4science:Data>\n" + " <d4science:Description><![CDATA[" + (info.getAbstractStr() != null ? info.getAbstractStr() : "") + "]]></d4science:Description>\n" + " <d4science:MimeType>" + mimetype + "</d4science:MimeType>\n" + " </d4science:k_" + cleanTagString(key) + ">\n";
|
||||
}
|
||||
XMLString += " </d4science:output>\n" + "</gml:featureMember>\n";
|
||||
|
||||
AnalysisLogger.getLogger().debug("Non deterministic output: " + XMLString);
|
||||
|
||||
XmlObject xmlData = XmlObject.Factory.newInstance();
|
||||
ByteArrayInputStream xstream = new ByteArrayInputStream(XMLString.getBytes());
|
||||
xmlData = XmlObject.Factory.parse(xstream);
|
||||
AnalysisLogger.getLogger().debug("Output has been correctly parsed");
|
||||
|
||||
return xmlData;
|
||||
}
|
||||
|
||||
public XmlObject generateNonDeterministicOutputCollection(LinkedHashMap<String, IOWPSInformation> ndoutput) throws Exception {
|
||||
String XMLString = "<ogr:FeatureCollection xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://ogr.maptools.org/ result_8751.xsd\" xmlns:ogr=\"http://ogr.maptools.org/\" xmlns:gml=\"http://www.opengis.net/gml\" xmlns:d4science=\"http://www.d4science.org\">" +
|
||||
"\n<gml:featureMember>\n" + " <ogr:Result fid=\"F0\">\n" +
|
||||
" <d4science:output fid=\"outputcollection\">\n";
|
||||
|
||||
for (String key : ndoutput.keySet()) {
|
||||
IOWPSInformation info = ndoutput.get(key);
|
||||
String payload = info.getContent();
|
||||
String mimetype = info.getMimetype();
|
||||
XMLString += " <d4science:k_" + cleanTagString(key) + ">" + " <d4science:Data><![CDATA[" + payload + "]]></d4science:Data>\n" + " <d4science:Description><![CDATA[" + (info.getAbstractStr() != null ? info.getAbstractStr() : "") + "]]></d4science:Description>\n" + " <d4science:MimeType>" + mimetype + "</d4science:MimeType>\n" + " </d4science:k_" + cleanTagString(key) + ">\n";
|
||||
}
|
||||
XMLString += " </d4science:output>\n" + " </ogr:Result>\n</gml:featureMember>\n</ogr:FeatureCollection>";
|
||||
|
||||
AnalysisLogger.getLogger().debug("Non deterministic output: " + XMLString);
|
||||
|
||||
XmlObject xmlData = XmlObject.Factory.newInstance();
|
||||
ByteArrayInputStream xstream = new ByteArrayInputStream(XMLString.getBytes());
|
||||
xmlData = XmlObject.Factory.parse(xstream);
|
||||
AnalysisLogger.getLogger().debug("Output has been correctly parsed");
|
||||
|
||||
return xmlData;
|
||||
}
|
||||
|
||||
public XmlObject generateNonDeterministicOutput(LinkedHashMap<String, IOWPSInformation> ndoutput) throws Exception {
|
||||
|
||||
if (ndoutput.size()==0)
|
||||
return null;
|
||||
|
||||
String XMLString = "<ogr:FeatureCollection xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://ogr.maptools.org/ result_8751.xsd\" xmlns:ogr=\"http://ogr.maptools.org/\" xmlns:gml=\"http://www.opengis.net/gml\" xmlns:d4science=\"http://www.d4science.org\">" +
|
||||
"\n<gml:featureMember>\n";
|
||||
int count = 0;
|
||||
for (String key : ndoutput.keySet()) {
|
||||
IOWPSInformation info = ndoutput.get(key);
|
||||
String payload = info.getContent();
|
||||
String mimetype = info.getMimetype();
|
||||
String abstractStr = info.getAbstractStr();
|
||||
|
||||
AnalysisLogger.getLogger().debug("IOWPS Information: " + "name "+info.getName()+","
|
||||
+"abstr "+info.getAbstractStr()+","
|
||||
+"content "+info.getContent()+","
|
||||
+"def "+info.getDefaultVal()+",");
|
||||
|
||||
if ((abstractStr==null || abstractStr.trim().length()==0) && (payload!= null && payload.trim().length()>0))
|
||||
abstractStr = info.getName();
|
||||
else if (abstractStr == null)
|
||||
abstractStr = "";
|
||||
|
||||
//geospatialized
|
||||
// XMLString += " <ogr:Result fid=\"F" + count+ "\">" + "<ogr:geometryProperty><gml:Point><gml:coordinates>0,0</gml:coordinates></gml:Point></ogr:geometryProperty>"+ " <d4science:Data><![CDATA[" + payload + "]]></d4science:Data>\n" + " <d4science:Description><![CDATA[" + (info.getAbstractStr() != null ? info.getAbstractStr() : "") + "]]></d4science:Description>\n" + " <d4science:MimeType>" + mimetype + "</d4science:MimeType>\n" + " </ogr:Result>\n";
|
||||
XMLString += " <ogr:Result fid=\"F" + count+ "\">" + " <d4science:Data><![CDATA[" + payload + "]]></d4science:Data>\n" + " <d4science:Description><![CDATA[" + abstractStr + "]]></d4science:Description>\n" + " <d4science:MimeType>" + mimetype + "</d4science:MimeType>\n" + " </ogr:Result>\n";
|
||||
count++;
|
||||
}
|
||||
XMLString += " </gml:featureMember>\n</ogr:FeatureCollection>";
|
||||
|
||||
|
||||
|
||||
AnalysisLogger.getLogger().debug("Non deterministic output: " + XMLString);
|
||||
|
||||
XmlObject xmlData = XmlObject.Factory.newInstance();
|
||||
ByteArrayInputStream xstream = new ByteArrayInputStream(XMLString.getBytes());
|
||||
xmlData = XmlObject.Factory.parse(xstream);
|
||||
AnalysisLogger.getLogger().debug("Output has been correctly parsed");
|
||||
|
||||
return xmlData;
|
||||
}
|
||||
|
||||
public void shutdown(){
|
||||
try{
|
||||
storageclient.close();
|
||||
}catch(Exception e){
|
||||
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,384 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping;
|
||||
|
||||
import java.awt.Image;
|
||||
import java.awt.image.BufferedImage;
|
||||
import java.io.File;
|
||||
import java.io.FileReader;
|
||||
import java.io.FileWriter;
|
||||
import java.io.InputStream;
|
||||
import java.sql.Connection;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
import java.util.UUID;
|
||||
|
||||
import javax.imageio.ImageIO;
|
||||
|
||||
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalTypeList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.TablesList;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
|
||||
import org.gcube.dataanalysis.ecoengine.utils.IOHelper;
|
||||
import org.gcube.dataanalysis.executor.scripts.OSCommand;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.utils.FileManager;
|
||||
import org.postgresql.copy.CopyManager;
|
||||
import org.postgresql.core.BaseConnection;
|
||||
|
||||
import scala.actors.threadpool.Arrays;
|
||||
|
||||
/**
 * Converts ecological-engine {@link StatisticalType} descriptors into WPS
 * process-description fragments, filling the placeholder tokens of the
 * class templates loaded from {@code templates/classtemplate.properties}.
 * As a side effect it collects, per conversion, the IO metadata
 * ({@link IOWPSInformation}) of inputs and outputs and the files/tables
 * generated while materialising them.
 */
public class StatisticalTypeToWPSType {
	// template snippets keyed by "<type><Input|Output>"; loaded once per JVM
	public static Properties templates;
	// placeholder tokens substituted inside each template snippet
	static String ABSTRACT = "#ABSTRACT#";
	static String TITLE = "#TITLE#";
	// NOTE(review): CLASSNAME is never referenced below — looks unused; confirm
	static String CLASSNAME = "#CLASSNAME#";
	static String ALLOWED = "#ALLOWED#";
	static String DEFAULT = "#DEFAULT#";
	static String ID = "#ID#";
	static String IDMETHOD = "#IDMETHOD#";

	// accumulated IO metadata, keyed by parameter name (insertion order kept)
	public LinkedHashMap<String, IOWPSInformation> inputSet = new LinkedHashMap<String, IOWPSInformation>();
	public LinkedHashMap<String, IOWPSInformation> outputSet = new LinkedHashMap<String, IOWPSInformation>();
	// files and tables produced while converting outputs (for later cleanup/upload)
	public List<File> generatedFiles = new ArrayList<File>();
	public List<String> generatedTables = new ArrayList<String>();

	public List<File> getGeneratedFiles() {
		return generatedFiles;
	}

	public List<String> getGeneratedTables() {
		return generatedTables;
	}

	/**
	 * Lazily loads the WPS class templates from the classpath. Synchronized so
	 * concurrent constructions load the shared static {@link #templates} once.
	 *
	 * @throws Exception if the template resource cannot be read
	 */
	public synchronized void getTemplates() throws Exception {
		if (templates != null)
			return;

		templates = new Properties();
		InputStream is = this.getClass().getClassLoader().getResourceAsStream("templates/classtemplate.properties");
		templates.load(is);
		is.close();
	}

	public StatisticalTypeToWPSType() throws Exception {
		getTemplates();
	}

	/** Makes a name usable as a Java/WPS identifier by replacing punctuation with underscores. */
	public String cleanID(String name) {
		return name.replaceAll("[ \\]\\[!\"#$%&'()*+,\\./:;<=>?@\\^`{|}~-]", "_");
		// return name;
	}

	/**
	 * Converts one StatisticalType into the corresponding WPS template snippet
	 * and registers its IO metadata in {@link #inputSet}/{@link #outputSet}.
	 *
	 * @param stype   the descriptor to convert (null yields an empty string)
	 * @param isinput true when the descriptor is an algorithm input
	 * @param config  algorithm configuration (persistence paths, DB credentials)
	 * @return the template snippet, "" for null input, or null for unsupported types
	 * @throws Exception on file/DB errors while materialising outputs
	 */
	public String convert2WPSType(StatisticalType stype, boolean isinput, AlgorithmConfiguration config) throws Exception {
		if (stype == null)
			return "";

		String wpstype = null;
		String outputType = "";
		TableTemplatesMapper mapper = new TableTemplatesMapper();

		// directory exposed over HTTP where generated files are copied
		String webpersistence = config.getParam(ConfigurationManager.webPersistencePathVariable);
		AnalysisLogger.getLogger().debug("Using the foll. web persistence: " + webpersistence);

		String name = stype.getName();
		String classForname = stype.getClass().getSimpleName();
		if (name == null || name.length() == 0)
			name = classForname.replace(".", "");
		String id = (name);
		String abstractStr = stype.getDescription() != null ? stype.getDescription() : "";
		String allowed = "";
		String defaultVal = stype.getDefaultValue() != null ? stype.getDefaultValue() : "";

		String content = null;
		// path of the locally generated copy, when one exists
		String localcontent = null;
		String mimeType = "";
		if (stype instanceof PrimitiveType) {
			PrimitiveType ptype = (PrimitiveType) stype;
			PrimitiveTypes subtype = ptype.getType();

			switch (subtype) {
			case STRING:
				content = (String) ptype.getContent();
				outputType = "string";
				mimeType = "text/plain";
				break;
			case NUMBER: {
				content = "" + ptype.getContent();
				String classname = ptype.getClassName();
				outputType = "integer";
				mimeType = "text/plain";
				// anything that is not an Integer is mapped to a WPS double
				if (!classname.equals(Integer.class.getName())) {
					outputType = "double";
				}
				break;
			}

			case ENUMERATED: {
				Object contentObj = ptype.getContent();
				content = Arrays.toString((Object[]) ptype.getContent());
				outputType = "enumerated";
				mimeType = "text/plain";
				Object[] allowedObjs = (Object[]) contentObj;
				allowed = "";

				// generation of allowed values with check of the default value
				String candidatedefaultvalue = "";
				boolean defaultfound = false;
				for (int i = 0; i < allowedObjs.length; i++) {
					String allowedS = ("" + allowedObjs[i]).trim();
					allowed += "\"" + allowedS + "\"";
					if (i == 0)
						candidatedefaultvalue = allowedS;
					if (allowedS.equals(defaultVal))
						defaultfound = true;
					if (i < allowedObjs.length - 1)
						allowed += ",";
				}

				// if the declared default is not an allowed value, fall back to the first one
				if (!defaultfound)
					defaultVal = candidatedefaultvalue;

				break;
			}
			case FILE:
				String filename = "";
				if (ptype.getContent() != null) {
					String originalfile = ((File) ptype.getContent()).getAbsolutePath();
					// NOTE(review): this second getAbsolutePath() call discards its
					// result — looks like a leftover duplicate; confirm before removing
					((File) ptype.getContent()).getAbsolutePath();
					// search for the object in various locations
					AnalysisLogger.getLogger().debug("Searching for file in: " + originalfile);
					if (!new File(originalfile).exists()) {
						originalfile = new File(config.getPersistencePath(), ((File) ptype.getContent()).getName()).getAbsolutePath();
						AnalysisLogger.getLogger().debug("Searching for file in persistence path: " + originalfile);
						if (!new File(originalfile).exists()) {
							originalfile = new File(config.getConfigPath(), ((File) ptype.getContent()).getName()).getAbsolutePath();
							AnalysisLogger.getLogger().debug("Searching for file in config path: " + originalfile);
						}
					}
					if (!new File(originalfile).exists()) {
						AnalysisLogger.getLogger().debug("The file does not exist! " + originalfile);
					} else {
						AnalysisLogger.getLogger().debug("The file exists! " + originalfile);
						filename = ((File) ptype.getContent()).getName();
						// timestamp prefix avoids clashes in the shared web folder
						String filenameDest = System.currentTimeMillis() + "_" + filename;
						String destinationfile = new File(webpersistence, filenameDest).getAbsolutePath();
						AnalysisLogger.getLogger().debug("Copying file into a temporary file: " + destinationfile);

						FileManager.FileCopy(originalfile, destinationfile);
						content = config.getParam(ConfigurationManager.webpathVariable) + filenameDest;
						localcontent = destinationfile;
						AnalysisLogger.getLogger().debug("Web content associated to the file is: " + content);
						generatedFiles.add(new File(originalfile));
					}
				}
				// csv/txt files get a dedicated mime type; everything else is opaque
				if (filename.toLowerCase().endsWith(".csv") || filename.toLowerCase().endsWith(".txt")) {
					outputType = "csvFile";
					mimeType = "text/csv";
				} else {
					outputType = "d4scienceFile";
					mimeType = "application/d4science";
				}
				AnalysisLogger.getLogger().debug("File managed correctly: Type: " + outputType + " mimetype: " + mimeType);
				break;
			case MAP: {
				// container of named sub-elements: convert each one recursively
				Map<String, StatisticalType> subelements = (Map<String, StatisticalType>) ptype.getContent();
				wpstype = "";
				int counter = 1;
				for (String subel : subelements.keySet()) {
					StatisticalType stsub = subelements.get(subel);
					String sclassForname = stsub.getClass().getSimpleName();
					if (stsub.getName() == null || stsub.getName().length() == 0)
						stsub.setName(sclassForname.replace(".", "") + counter);

					wpstype = wpstype + "\n" + convert2WPSType(stsub, isinput, config);
					counter++;
				}
				break;
			}
			case BOOLEAN:
				outputType = "boolean";
				mimeType = "text/plain";
				break;
			case IMAGES: {
				// content = ptype.getContent();
				// each image becomes a pngFile template entry plus a PNG dumped
				// into the web persistence folder
				Map<String, Image> subelements = (Map<String, Image>) ptype.getContent();
				wpstype = "";
				if (subelements != null && subelements.size() > 0) {
					for (String subel : subelements.keySet()) {
						// Image stsub = subelements.get(subel);
						outputType = "pngFile";
						outputType += isinput ? "Input" : "Output";

						wpstype = wpstype + "\n" + ((String) templates.get(outputType)).replace(ABSTRACT, subel).replace(TITLE, subel).replace(ID, subel).replace(IDMETHOD, cleanID(id)).replace(DEFAULT, defaultVal);

						String imagefilename = new File(webpersistence, subel + "_" + UUID.randomUUID() + ".png").getAbsolutePath();
						BufferedImage bi = ImageTools.toBufferedImage(subelements.get(subel));
						File f = new File(imagefilename);

						ImageIO.write(bi, "png", f);

						// upload on WS and get URL - TOO SLOW!
						// String url =
						// AbstractEcologicalEngineMapper.uploadOnWorkspaceAndGetURL(config.getGcubeScope(),
						// imagefilename);
						String url = config.getParam(ConfigurationManager.webpathVariable) + f.getName();
						AnalysisLogger.getLogger().debug("Got URL for the file " + url);

						IOWPSInformation info = new IOWPSInformation();
						info.setName(subel);
						info.setAbstractStr(subel);
						info.setAllowed(allowed);
						info.setContent(url);
						info.setLocalMachineContent(imagefilename);
						info.setDefaultVal(defaultVal);
						info.setMimetype("image/png");
						info.setClassname(classForname);
						generatedFiles.add(f);
						if (isinput)
							inputSet.put(subel, info);
						else
							outputSet.put(subel, info);

					}
				}
				break;
			}
			default:
				// unsupported primitive subtype: no WPS representation
				return null;
			}
		} else if (stype instanceof PrimitiveTypesList) {
			// a list of primitives is flattened to a single '|'-separated string
			String format = ((PrimitiveTypesList) stype).getClassName();
			format = format.substring(format.lastIndexOf(".") + 1);
			PrimitiveType pptype = new PrimitiveType(((PrimitiveTypesList) stype).getClassName(), null, ((PrimitiveTypesList) stype).getType(), name, abstractStr + " [a sequence of values separated by | ] (format: " + format + ")", defaultVal);
			pptype.setType(PrimitiveTypes.STRING);
			pptype.setClassName(String.class.getName());
			wpstype = convert2WPSType(pptype, isinput, config);
		} else if (stype instanceof ColumnType) {
			outputType = "string";
			abstractStr += " [the name of a column from " + ((ColumnType) stype).getTableName() + "]";
			mimeType = "text/plain";
		} else if (stype instanceof ColumnTypesList) {
			outputType = "string";
			abstractStr += " [a sequence of names of columns from " + ((ColumnTypesList) stype).getTabelName() + " separated by | ]";
			mimeType = "text/plain";
		} else if (stype instanceof DatabaseType) {
			// database connection parameters are internal; never exposed via WPS
			return null;
		} else if (stype instanceof OutputTable) {
			// content = ((OutputTable) stype).getTableName();
			String tablename = (String) ((OutputTable) stype).getTableName();

			outputType = "csvFile";
			mimeType = "text/csv";
			String template = ((OutputTable) stype).getTemplateNames().get(0).name();
			abstractStr += " [a http link to a table in UTF-8 ecoding following this template: " + mapper.linksMap.get(template) + "]";
			if (tablename != null && tablename.length() > 0) {
				generatedTables.add(tablename);
				// dump the DB table to a CSV in the web folder and expose its URL
				String localfile = new File(webpersistence, tablename + UUID.randomUUID() + ".csv").getAbsolutePath();
				AnalysisLogger.getLogger().debug("Creating file " + localfile + " from table " + content);
				dumpTable(localfile, tablename, ",", config.databaseUserName, config.databasePassword, config.databaseURL);
				AnalysisLogger.getLogger().debug("File " + localfile + " has been created");
				// upload on WS and get URL
				// String url =
				// AbstractEcologicalEngineMapper.uploadOnWorkspaceAndGetURL(config.getGcubeScope(),
				// localfile);
				String url = config.getParam(ConfigurationManager.webpathVariable) + new File(localfile).getName();
				AnalysisLogger.getLogger().debug("Got URL for file " + url);
				content = url;
				localcontent = localfile;
			}
			// upload on storage and get URL

		} else if (stype instanceof InputTable) {
			outputType = "csvFile";
			mimeType = "text/csv";
			String template = ((InputTable) stype).getTemplateNames().get(0).name();
			abstractStr += " [a http link to a table in UTF-8 encoding following this template: " + mapper.linksMap.get(template) + "]";
		} else if (stype instanceof ServiceType) {
			return null;
		} else if (stype instanceof StatisticalTypeList) {
			return null;
		} else if (stype instanceof TablesList) {
			outputType = "csvFile";
			String template = ((TablesList) stype).getTemplates().get(0).name();
			abstractStr += " [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: " + mapper.linksMap.get(template) + "]";
			mimeType = "text/csv";
		}

		// template keys are suffixed by direction, e.g. "csvFileInput"
		outputType += isinput ? "Input" : "Output";

		if (name == null)
			name = outputType;

		// wpstype is still null unless a MAP/IMAGES/list branch already built it
		if (wpstype == null) {
			// wpstype = ((String) templates.get(outputType)).replace(ABSTRACT,
			// abstractStr).replace(TITLE, name).replace(ID,
			// id).replace(DEFAULT, defaultVal).replace(ALLOWED, allowed);
			wpstype = ((String) templates.get(outputType)).replace(ABSTRACT, "Name of the parameter: " + name + ". " + abstractStr).replace(TITLE, abstractStr).replace(ID, id).replace(IDMETHOD, cleanID(id)).replace(DEFAULT, defaultVal).replace(ALLOWED, allowed);

			IOWPSInformation info = new IOWPSInformation();
			info.setName(name);
			info.setAllowed(allowed);
			info.setContent(content);
			info.setDefaultVal(defaultVal);
			info.setLocalMachineContent(localcontent);
			info.setMimetype(mimeType);
			info.setClassname(classForname);
			info.setAbstractStr(abstractStr);

			if (localcontent != null)
				generatedFiles.add(new File(localcontent));
			if (isinput)
				inputSet.put(name, info);
			else
				outputSet.put(name, info);

		}
		return wpstype.trim();
	}

	/**
	 * Dumps a PostgreSQL table to a CSV file (with header) using the COPY protocol.
	 *
	 * @param filePath    destination CSV path
	 * @param tablename   table to export
	 * @param delimiter   field delimiter
	 * @param username    DB user
	 * @param password    DB password
	 * @param databaseurl JDBC URL
	 * @throws Exception on connection or copy failure
	 */
	// NOTE(review): conn and fw are not closed if copyOut throws — consider
	// try-with-resources to avoid leaking the connection on failure
	public void dumpTable(String filePath, String tablename, String delimiter, String username, String password, String databaseurl) throws Exception {

		Connection conn = DatabaseFactory.getDBConnection("org.postgresql.Driver", username, password, databaseurl);
		CopyManager copyManager = new CopyManager((BaseConnection) conn);
		FileWriter fw = new FileWriter(filePath);
		copyManager.copyOut(String.format("COPY %s TO STDOUT WITH DELIMITER '%s' NULL AS '' CSV HEADER QUOTE AS '\"'", tablename, delimiter), fw);
		conn.close();
		fw.close();
	}

	/*
	 * enum a { A, B };
	 */
	// leftover manual test entry point; kept for local experimentation only
	public static void main(String[] args) throws Exception {
		// Object[] elements = a.values();
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setGcubeScope("/gcube/devsec");
		// config.setParam(AbstractEcologicalEngineMapper.processingSession,
		// ""+UUID.randomUUID());
		// config.setParam(AbstractEcologicalEngineMapper.serviceUserNameParameter,
		// "wps.synch");

		// AbstractEcologicalEngineMapper.uploadOnWorkspaceAndGetURL(config,new
		// File( "./datasets/hcaf_d_mini.csv"), "test gp for WPS", "text/csv");

	}
}
|
|
@ -1,69 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping;
|
||||
|
||||
import java.util.HashMap;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
|
||||
|
||||
/**
 * Static lookup tables describing the gCube table templates: per template it
 * provides the CREATE TABLE skeleton ({@link #dbTemplatesMap}), a link to the
 * reference documentation ({@link #linksMap}) and the expected column
 * definitions ({@link #varsMap}). All maps are keyed by
 * {@link TableTemplates}{@code .name()}.
 */
public class TableTemplatesMapper {


	// CREATE TABLE skeletons with #table_name# and #vars# placeholders
	public HashMap<String,String> dbTemplatesMap = new HashMap<String, String>();
	// human-readable documentation link per template
	public HashMap<String,String> linksMap = new HashMap<String, String>();
	// column definitions (the #vars# payload) per template
	public HashMap<String,String> varsMap = new HashMap<String, String>();

	public TableTemplatesMapper(){
		variablesMapping();
		tablesMapping();
		linksMapping();
	}

	/** Populates {@link #dbTemplatesMap}. */
	public void tablesMapping(){
		// NOTE(review): re-instantiation is redundant with the field initializer
		dbTemplatesMap = new HashMap<String, String>();
		dbTemplatesMap.put(TableTemplates.HSPEN.name(), "CREATE TABLE #table_name# (#vars#, CONSTRAINT #table_name#_pkey PRIMARY KEY (speciesid, lifestage))");
		dbTemplatesMap.put(TableTemplates.HCAF.name(), "CREATE TABLE #table_name# (#vars#, CONSTRAINT #table_name#_pkey PRIMARY KEY (csquarecode))");
		dbTemplatesMap.put(TableTemplates.HSPEC.name(), "CREATE TABLE #table_name# (#vars#)");
		dbTemplatesMap.put(TableTemplates.OCCURRENCE_AQUAMAPS.name(), "CREATE TABLE #table_name# (#vars#)");
		dbTemplatesMap.put(TableTemplates.OCCURRENCE_SPECIES.name(), "CREATE TABLE #table_name# (#vars#)");
		dbTemplatesMap.put(TableTemplates.MINMAXLAT.name(), "CREATE TABLE #table_name# (#vars#)");
		dbTemplatesMap.put(TableTemplates.TRAININGSET.name(), "CREATE TABLE #table_name# (#vars#)");
		dbTemplatesMap.put(TableTemplates.TESTSET.name(), "CREATE TABLE #table_name# (#vars#)");
		dbTemplatesMap.put(TableTemplates.CLUSTER.name(), "CREATE TABLE #table_name# (#vars#)");
		dbTemplatesMap.put(TableTemplates.TIMESERIES.name(), "CREATE TABLE #table_name# (#vars#)");
		dbTemplatesMap.put(TableTemplates.GENERIC.name(), "CREATE TABLE #table_name# (#vars#)");
	}

	/** Populates {@link #linksMap}. */
	public void linksMapping(){
		// NOTE(review): re-instantiation is redundant with the field initializer
		linksMap = new HashMap<String, String>();
		linksMap.put(TableTemplates.HSPEN.name(), "(HSPEN) http://goo.gl/4zDiAK");
		linksMap.put(TableTemplates.HCAF.name(), "(HCAF) http://goo.gl/SZG9uM");
		linksMap.put(TableTemplates.HSPEC.name(),"(HSPEC) http://goo.gl/OvKa1h");
		linksMap.put(TableTemplates.OCCURRENCE_AQUAMAPS.name(), "(OCCURRENCE_AQUAMAPS) http://goo.gl/vHil5T");
		linksMap.put(TableTemplates.OCCURRENCE_SPECIES.name(), "(OCCURRENCE_SPECIES) http://goo.gl/4ExuR5");
		linksMap.put(TableTemplates.MINMAXLAT.name(), "(MINMAXLAT) http://goo.gl/cRzwgN");
		linksMap.put(TableTemplates.TRAININGSET.name(), "(TRAININGSET) http://goo.gl/Br44UQ");
		linksMap.put(TableTemplates.TESTSET.name(), "(TESTSET) http://goo.gl/LZHNXt");
		linksMap.put(TableTemplates.CLUSTER.name(), "(CLUSTER) http://goo.gl/PnKhhb");
		linksMap.put(TableTemplates.TIMESERIES.name(), "(TIMESERIES) http://goo.gl/DoW6fg");
		linksMap.put(TableTemplates.GENERIC.name(), "(GENERIC) A generic comma separated csv file in UTF-8 encoding");
	}

	/** Populates {@link #varsMap}. */
	public void variablesMapping(){
		// NOTE(review): re-instantiation is redundant with the field initializer
		varsMap = new HashMap<String, String>();
		varsMap.put(TableTemplates.HSPEN.name(), "speccode integer, speciesid character varying NOT NULL, lifestage character varying NOT NULL, faoareas character varying(100), faoareasref character varying, faocomplete smallint, nmostlat real, smostlat real, wmostlong real, emostlong real, lme character varying(180), depthyn smallint, depthmin integer, depthmax integer, depthprefmin integer, depthprefmax integer, meandepth smallint, depthref character varying, pelagic smallint, tempyn smallint, tempmin real, tempmax real, tempprefmin real, tempprefmax real, tempref character varying, salinityyn smallint, salinitymin real, salinitymax real, salinityprefmin real, salinityprefmax real, salinityref character varying, primprodyn smallint, primprodmin real, primprodmax real, primprodprefmin real, primprodprefmax real, primprodprefref character varying, iceconyn smallint, iceconmin real, iceconmax real, iceconprefmin real, iceconprefmax real, iceconref character varying, landdistyn smallint, landdistmin real, landdistmax real, landdistprefmin real, landdistprefmax real, landdistref character varying, remark character varying, datecreated timestamp without time zone, datemodified timestamp without time zone, expert integer, dateexpert timestamp without time zone, envelope smallint, mapdata smallint, effort smallint, layer character(1), usepoints smallint, rank smallint");
		varsMap.put(TableTemplates.HCAF.name(), "csquarecode character varying(10) NOT NULL, depthmin real, depthmax real, depthmean real, depthsd real, sstanmean real, sstansd real, sstmnmax real, sstmnmin real, sstmnrange real, sbtanmean real, salinitymean real, salinitysd real, salinitymax real, salinitymin real, salinitybmean real, primprodmean integer, iceconann real, iceconspr real, iceconsum real, iceconfal real, iceconwin real, faoaream integer, eezall character varying, lme integer, landdist integer, oceanarea real, centerlat real, centerlong real");
		varsMap.put(TableTemplates.HSPEC.name(), "speciesid character varying, csquarecode character varying, probability real, boundboxyn smallint, faoareayn smallint, faoaream integer, eezall character varying, lme integer");
		varsMap.put(TableTemplates.OCCURRENCE_AQUAMAPS.name(), "csquarecode character varying(10) NOT NULL, speciesid character varying NOT NULL, speccode integer, goodcell smallint, infaoarea smallint, inboundbox smallint, centerlat numeric, centerlong numeric, faoaream smallint, recordid integer NOT NULL");
		varsMap.put(TableTemplates.OCCURRENCE_SPECIES.name(), "institutioncode character varying, collectioncode character varying, cataloguenumber character varying, dataset character varying, dataprovider character varying, datasource character varying, scientificnameauthorship character varying, identifiedby character varying, credits character varying, recordedby character varying, eventdate timestamp without time zone, modified timestamp without time zone, scientificname character varying, kingdom character varying, family character varying, locality character varying, country character varying, citation character varying, decimallatitude double precision, decimallongitude double precision, coordinateuncertaintyinmeters character varying, maxdepth double precision, mindepth double precision, basisofrecord character varying" );
		varsMap.put(TableTemplates.MINMAXLAT.name(), "speciesid character varying, maxclat real, minclat real");
		varsMap.put(TableTemplates.TRAININGSET.name(), "<column_name_1 real, column_name_2 real, ..., column_name_n real>, label real, groupID character varying");
		varsMap.put(TableTemplates.TESTSET.name(), "<column_name_1 real, column_name_2 real, ..., column_name_n real>, tvalue real");
		varsMap.put(TableTemplates.CLUSTER.name(), "<column_name_1 real, column_name_2 real, ..., column_name_n real> , clusterid character varying, outlier boolean");
		varsMap.put(TableTemplates.TIMESERIES.name(), "<column_name_1 real, column_name_2 real, ..., column_name_n real>, time timestamp without time zone");
		varsMap.put(TableTemplates.GENERIC.name(), "<column_name_1 real, column_name_2 real, ..., column_name_n real>");
	}

	/**
	 * Builds a CREATE TABLE statement by substituting the table name and the
	 * column definitions into the template's skeleton.
	 *
	 * @param tablename the concrete table name
	 * @param template  the TableTemplates key (its {@code name()})
	 * @param variables the column definitions to insert
	 * @return the completed CREATE TABLE statement
	 */
	public String generateCreateStatement(String tablename, String template, String variables){
		return dbTemplatesMap.get(template).replace("#table_name#", tablename).replace("#vars#", variables);
	}

}
|
|
@ -1,44 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping;
|
||||
|
||||
import org.gcube.common.authorization.library.provider.AuthorizationProvider;
|
||||
import org.gcube.common.authorization.library.provider.UserInfo;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
|
||||
public class TokenManager {
|
||||
|
||||
String username;
|
||||
String scope;
|
||||
|
||||
|
||||
public String getScope(){
|
||||
return scope;
|
||||
}
|
||||
|
||||
public String getUserName(){
|
||||
return username;
|
||||
}
|
||||
|
||||
public void getCredentials() {
|
||||
try{
|
||||
System.out.println("Retrieving token credentials");
|
||||
scope = ScopeProvider.instance.get();
|
||||
System.out.println("Credentials from the GHN: scope: "+scope);
|
||||
//get username from SmartGears
|
||||
UserInfo token = AuthorizationProvider.instance.get();
|
||||
username = token.getUserName();
|
||||
System.out.println("Credentials from the GHN: user: "+username);
|
||||
}catch(Exception e){
|
||||
e.printStackTrace();
|
||||
System.out.println("Error Retrieving token credentials: "+e.getLocalizedMessage());
|
||||
scope = null;
|
||||
username= null;
|
||||
}
|
||||
if ((scope==null || username==null) && ConfigurationManager.isSimulationMode()){
|
||||
scope = ConfigurationManager.defaultScope;
|
||||
username = ConfigurationManager.defaultUsername;
|
||||
}
|
||||
System.out.println("Retrieved scope: "+scope+" Username: "+username +" SIMULATION MODE: "+ConfigurationManager.isSimulationMode());
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -1,31 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping;
|
||||
|
||||
/**
 * Placeholder for workspace-related helpers.
 * <p>
 * A commented-out draft of {@code uploadOnWorkspaceAndGetURL(...)} formerly
 * lived here; it has been removed as dead code (it remains in version control
 * history). See {@code DataspaceManager} for the current workspace upload
 * logic.
 */
public class WorkspaceManager {

}
|
|
@ -1,85 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace;
|
||||
|
||||
|
||||
/**
 * Value holder describing a single DataMiner computation for provenance
 * tracking: which operator ran, on which infrastructure, when it started and
 * ended, its status, and the user who launched it.
 * <p>
 * NOTE: fields are intentionally public because other components (e.g.
 * DataspaceManager, ProvOGenerator) read them directly; the bean-style
 * accessors are kept alongside for consistency. Fix: accessors for
 * {@code user} were missing and have been added.
 */
public class ComputationData {

    // Error description of a failed run; not set by the constructor.
    public String exception;
    public String name;
    public String operator;
    public String operatorDescription;
    public String infrastructure;
    public String startDate;
    public String endDate;
    public String status;
    public String id;
    public String user;

    /**
     * Creates a fully-populated computation record ({@link #exception} is
     * left null and can be set later via {@link #setException(String)}).
     */
    public ComputationData(String name, String operator, String operatorDescription, String infrastructure, String startDate, String endDate, String status, String id, String user) {
        super();
        this.name = name;
        this.operator = operator;
        this.operatorDescription = operatorDescription;
        this.infrastructure = infrastructure;
        this.startDate = startDate;
        this.endDate = endDate;
        this.status = status;
        this.id = id;
        this.user = user;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getOperator() {
        return operator;
    }

    public void setOperator(String operator) {
        this.operator = operator;
    }

    public String getOperatorDescription() {
        return operatorDescription;
    }

    public void setOperatorDescription(String operatorDescription) {
        this.operatorDescription = operatorDescription;
    }

    public String getInfrastructure() {
        return infrastructure;
    }

    public void setInfrastructure(String infrastructure) {
        this.infrastructure = infrastructure;
    }

    public String getStartDate() {
        return startDate;
    }

    public void setStartDate(String startDate) {
        this.startDate = startDate;
    }

    public String getEndDate() {
        return endDate;
    }

    public void setEndDate(String endDate) {
        this.endDate = endDate;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getException() {
        return exception;
    }

    public void setException(String exception) {
        this.exception = exception;
    }

    /** @return the user who launched the computation (added for consistency with the other fields) */
    public String getUser() {
        return user;
    }

    public void setUser(String user) {
        this.user = user;
    }
}
|
|
@ -1,5 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace;
|
||||
|
||||
/**
 * Provenance flag for data tracked in the DataMiner data space; the value is
 * stored as the "IO" property of uploaded workspace items (see
 * DataspaceManager.uploadData). The names suggest IMPORTED = supplied as
 * input, COMPUTED = produced by a computation.
 */
public enum DataProvenance {
    IMPORTED,COMPUTED
}
|
|
@ -1,345 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.InputStream;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.homelibrary.home.Home;
|
||||
import org.gcube.common.homelibrary.home.HomeLibrary;
|
||||
import org.gcube.common.homelibrary.home.HomeManager;
|
||||
import org.gcube.common.homelibrary.home.HomeManagerFactory;
|
||||
import org.gcube.common.homelibrary.home.User;
|
||||
import org.gcube.common.homelibrary.home.workspace.Workspace;
|
||||
import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder;
|
||||
import org.gcube.common.homelibrary.home.workspace.WorkspaceItem;
|
||||
import org.gcube.common.homelibrary.home.workspace.folder.FolderItem;
|
||||
import org.gcube.common.homelibrary.util.WorkspaceUtil;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;
|
||||
|
||||
/**
 * Persists provenance information for a DataMiner computation into the user's
 * gCube Home Library workspace.
 * <p>
 * It maintains a fixed folder layout under the workspace root
 * ("DataMiner" / "Input Data Sets" / "Output Data Sets" / "Computations"),
 * uploads input/output data files, writes a ProvO XML summary, and attaches a
 * gCube item carrying the computation's metadata properties. Designed to run
 * asynchronously via {@link Runnable}.
 */
public class DataspaceManager implements Runnable {

    // Workspace folder names (also used as lookup keys via Workspace.find/exists).
    public static String dataminerFolder = "DataMiner";
    public static String importedDataFolder = "Input Data Sets";
    public static String computedDataFolder = "Output Data Sets";
    public static String computationsFolder = "Computations";

    // Computation context captured at construction time.
    AlgorithmConfiguration config;
    ComputationData computation;
    List<StoredData> inputData;
    List<StoredData> outputData;
    List<File> generatedFiles;

    // Property keys attached to workspace items / gCube items.
    public static String computation_id = "computation_id";
    public static String data_id = "data_id";
    public static String data_type = "data_type";
    public static String operator_name = "operator_name";

    public static String operator_description = "operator_description";
    public static String data_description = "data_description";
    public static String creation_date = "creation_date";
    public static String start_date = "start_date";
    public static String end_date = "end_date";
    public static String status = "status";
    // NOTE: the stored key is "execution_type", not "execution_platform".
    public static String execution_platform = "execution_type";
    public static String error = "error";
    public static String IO = "IO";
    public static String operator = "operator";
    public static String payload = "payload";

    /**
     * @param config         algorithm configuration (scope, agent, persistence path)
     * @param computation    metadata of the computation being recorded
     * @param inputData      input data descriptors to upload
     * @param outputData     output data descriptors to upload
     * @param generatedFiles temporary files to delete once provenance is written
     */
    public DataspaceManager(AlgorithmConfiguration config, ComputationData computation, List<StoredData> inputData, List<StoredData> outputData, List<File> generatedFiles) {
        this.config = config;
        this.computation = computation;
        this.inputData = inputData;
        this.outputData = outputData;
        this.generatedFiles = generatedFiles;
    }

    /**
     * Entry point for asynchronous execution: removes any previously written
     * "running computation" item, then writes the full provenance record.
     * All failures are logged and swallowed (best-effort bookkeeping).
     */
    public void run() {
        try {
            AnalysisLogger.getLogger().debug("Dataspace->Deleting running computation");
            try {
                deleteRunningComputationData();
            } catch (Exception e) {
                // No prior running-computation item is a normal condition.
                AnalysisLogger.getLogger().debug("Dataspace->No running computation available");
            }
            AnalysisLogger.getLogger().debug("Dataspace->Writing provenance information");
            writeProvenance(computation, inputData, outputData);
        } catch (Exception e) {
            e.printStackTrace();
            AnalysisLogger.getLogger().debug("Dataspace-> error writing provenance information " + e.getLocalizedMessage());
            AnalysisLogger.getLogger().debug(e);
        }
    }

    /**
     * Ensures the DataMiner folder hierarchy exists under the workspace root,
     * creating any missing folder. Idempotent: existing folders are reused.
     *
     * @throws Exception on any workspace access failure
     */
    public void createFoldersNetwork(Workspace ws, WorkspaceFolder root) throws Exception {
        AnalysisLogger.getLogger().debug("Dataspace->Creating folders for DataMiner");
        // manage folders: create the folders network
        if (!ws.exists(dataminerFolder, root.getId())) {
            AnalysisLogger.getLogger().debug("Dataspace->Creating DataMiner main folder");
            root.createFolder(dataminerFolder, "A folder collecting DataMiner experiments data and computation information");
        }
        WorkspaceFolder dataminerFolderWS = (WorkspaceFolder) root.find(dataminerFolder);

        if (!ws.exists(importedDataFolder, dataminerFolderWS.getId())) {
            AnalysisLogger.getLogger().debug("Dataspace->Creating DataMiner imported data folder");
            dataminerFolderWS.createFolder(importedDataFolder, "A folder collecting DataMiner imported data");
        }
        if (!ws.exists(computedDataFolder, dataminerFolderWS.getId())) {
            AnalysisLogger.getLogger().debug("Dataspace->Creating DataMiner computed data folder");
            dataminerFolderWS.createFolder(computedDataFolder, "A folder collecting DataMiner computed data");
        }
        if (!ws.exists(computationsFolder, dataminerFolderWS.getId())) {
            AnalysisLogger.getLogger().debug("Dataspace->Creating DataMiner computations folder");
            dataminerFolderWS.createFolder(computationsFolder, "A folder collecting DataMiner computations information");
        }
    }

    /**
     * Uploads one data item into the given workspace folder and tags it with
     * provenance properties.
     * <p>
     * For {@code StoredType.DATA} the payload is read either from a local file
     * (if the path exists) or by fetching the payload as a URL; the created
     * workspace file's public link becomes the returned URL. For any other
     * type the payload string itself is returned unchanged.
     *
     * @return the public URL of the uploaded file, or the raw payload for non-DATA items
     * @throws Exception on workspace or I/O failure
     */
    public String uploadData(StoredData data, WorkspaceFolder wsFolder) throws Exception {
        String filenameonwsString = WorkspaceUtil.getUniqueName(data.name, wsFolder);
        InputStream in = null;
        String url = "";
        if (data.type == StoredType.DATA) {
            if (new File(data.payload).exists()) {
                AnalysisLogger.getLogger().debug("Dataspace->Uploading file " + data.payload);
                in = new FileInputStream(new File(data.payload));
            } else {
                // Payload is not a local path: treat it as a downloadable URL.
                AnalysisLogger.getLogger().debug("Dataspace->Uploading via URL " + data.payload);
                URL urlc = new URL(data.payload);
                HttpURLConnection urlConnection = (HttpURLConnection) urlc.openConnection();
                in = new BufferedInputStream(urlConnection.getInputStream());
            }

            // AnalysisLogger.getLogger().debug("Dataspace->final file name on ws " + data.name+" description "+data.description);

            FolderItem fileItem = WorkspaceUtil.createExternalFile(wsFolder, filenameonwsString, data.description, null, in);
            // Attach provenance metadata to the uploaded item.
            fileItem.getProperties().addProperty(computation_id, data.computationId);
            fileItem.getProperties().addProperty(creation_date, data.creationDate);
            fileItem.getProperties().addProperty(operator, data.operator);
            fileItem.getProperties().addProperty(data_id, data.id);
            fileItem.getProperties().addProperty(data_description, data.description);
            fileItem.getProperties().addProperty(IO, data.provenance.name());
            fileItem.getProperties().addProperty(data_type, data.type.name());
            url = fileItem.getPublicLink(true);
            fileItem.getProperties().addProperty(payload, url);
            try {
                in.close();
            } catch (Exception e) {
                // best-effort close; failure here is deliberately ignored
            }
            AnalysisLogger.getLogger().debug("Dataspace->File created " + data.name);
        } else {
            // Non-file payloads (e.g. strings) are passed through as-is.
            AnalysisLogger.getLogger().debug("Dataspace->Uploading string " + data.payload);
            url = data.payload;
        }
        return url;
    }

    /**
     * Uploads all input data into the "Input Data Sets" subfolder of the given
     * folder and returns their URLs (in input order). If the subfolder is
     * missing or not a folder, nothing is uploaded and an empty list is returned.
     *
     * @throws Exception on workspace or I/O failure
     */
    public List<String> uploadInputData(List<StoredData> inputData, WorkspaceFolder dataminerFolder) throws Exception {
        AnalysisLogger.getLogger().debug("Dataspace->uploading input data " + inputData.size());
        WorkspaceItem folderItem = dataminerFolder.find(importedDataFolder);
        List<String> urls = new ArrayList<String>();
        if (folderItem != null && folderItem.isFolder()) {
            WorkspaceFolder destinationFolder = (WorkspaceFolder) folderItem;
            for (StoredData input : inputData) {
                String url = uploadData(input, destinationFolder);
                urls.add(url);
            }
        } else
            AnalysisLogger.getLogger().debug("Dataspace->folder is not valid");

        AnalysisLogger.getLogger().debug("Dataspace->finished uploading input data");
        return urls;
    }

    /**
     * Uploads all output data into the "Output Data Sets" subfolder of the
     * given folder and returns their URLs (in output order). Mirrors
     * {@link #uploadInputData(List, WorkspaceFolder)}.
     *
     * @throws Exception on workspace or I/O failure
     */
    public List<String> uploadOutputData(List<StoredData> outputData, WorkspaceFolder dataminerFolder) throws Exception {
        AnalysisLogger.getLogger().debug("Dataspace->uploading output data" + outputData.size());
        WorkspaceItem folderItem = dataminerFolder.find(computedDataFolder);
        List<String> urls = new ArrayList<String>();
        if (folderItem != null && folderItem.isFolder()) {
            WorkspaceFolder destinationFolder = (WorkspaceFolder) folderItem;
            for (StoredData output : outputData) {
                String url = uploadData(output, destinationFolder);
                urls.add(url);
            }
        } else
            AnalysisLogger.getLogger().debug("Dataspace->folder is not valid");
        AnalysisLogger.getLogger().debug("Dataspace->finished uploading output data");
        return urls;
    }

    /**
     * Records a finished computation under the "Computations" folder: creates
     * a per-computation folder with IO subfolders, re-uploads inputs/outputs
     * there, saves a ProvO XML summary, and publishes a gCube item carrying
     * the computation metadata and the per-IO URLs as properties.
     *
     * @throws Exception on workspace or I/O failure (ProvO XML failures are
     *                   caught and logged internally)
     */
    public void uploadComputationData(ComputationData computation, List<StoredData> inputData, List<StoredData> outputData, WorkspaceFolder dataminerFolder, Workspace ws) throws Exception {
        AnalysisLogger.getLogger().debug("Dataspace->uploading computation data");
        WorkspaceItem folderItem = dataminerFolder.find(computationsFolder);
        if (folderItem != null && folderItem.isFolder()) {
            // create a folder in here
            AnalysisLogger.getLogger().debug("Dataspace->Creating computation folder " + computation.id);
            WorkspaceFolder cfolder = ((WorkspaceFolder) folderItem);
            String cfoldername = WorkspaceUtil.getUniqueName(computation.id, cfolder);
            WorkspaceFolder newcomputationFolder = cfolder.createFolder(cfoldername, computation.operatorDescription);
            String itemType = "COMPUTATION";

            // create IO folders
            AnalysisLogger.getLogger().debug("Dataspace->creating IO folders under "+cfoldername);
            newcomputationFolder.createFolder(importedDataFolder, importedDataFolder);
            newcomputationFolder.createFolder(computedDataFolder, computedDataFolder);

            // copy IO in those folders
            List<String> inputurls = uploadInputData(inputData, newcomputationFolder);
            List<String> outputurls = uploadOutputData(outputData, newcomputationFolder);

            AnalysisLogger.getLogger().debug("Dataspace->creating gCube Item");

            // write a computation item for the computation; the same key/value
            // pairs go both into the gCube item properties and the folder properties
            LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
            properties.put(computation_id, computation.id);
            newcomputationFolder.getProperties().addProperty(computation_id, computation.id);
            properties.put(operator_name, config.getAgent());
            newcomputationFolder.getProperties().addProperty(operator_name, config.getAgent());
            properties.put(operator_description, computation.operatorDescription);
            newcomputationFolder.getProperties().addProperty(operator_description, computation.operatorDescription);
            properties.put(start_date, computation.startDate);
            newcomputationFolder.getProperties().addProperty(start_date, computation.startDate);
            properties.put(end_date, computation.endDate);
            newcomputationFolder.getProperties().addProperty(end_date, computation.endDate);
            properties.put(status, computation.status);
            newcomputationFolder.getProperties().addProperty(status, computation.status);
            properties.put(execution_platform, computation.infrastructure);
            newcomputationFolder.getProperties().addProperty(execution_platform, computation.infrastructure);
            int ninput = inputurls.size();
            int noutput = outputurls.size();

            // One property per IO, keyed "input<i>_<name>" / "output<i>_<name>" (1-based).
            AnalysisLogger.getLogger().debug("Dataspace->Adding input properties for "+ninput+" inputs");
            for (int i = 1; i <= ninput; i++) {
                properties.put("input"+i+"_"+inputData.get(i-1).name, inputurls.get(i-1));
                newcomputationFolder.getProperties().addProperty("input"+i+"_"+inputData.get(i-1).name, inputurls.get(i-1));
            }
            AnalysisLogger.getLogger().debug("Dataspace->Adding output properties for "+noutput+" outputs");
            for (int i = 1; i <= noutput; i++) {
                properties.put("output"+i+"_"+outputData.get(i-1).name, outputurls.get(i-1));
                newcomputationFolder.getProperties().addProperty("output"+i+"_"+outputData.get(i-1).name, outputurls.get(i-1));
            }

            AnalysisLogger.getLogger().debug("Dataspace->Saving properties to ProvO XML file "+noutput+" outputs");

            /*XStream xstream = new XStream();
            String xmlproperties = xstream.toXML(properties);
            */
            // Generate the ProvO XML summary, persist it locally, then upload it
            // into the computation folder. Failures here are logged, not fatal.
            try{String xmlproperties = ProvOGenerator.toProvO(computation, inputData, outputData);

            File xmltosave = new File(config.getPersistencePath(),"properties_"+UUID.randomUUID());
            FileTools.saveString(xmltosave.getAbsolutePath(), xmlproperties, true, "UTF-8");
            InputStream sis = new FileInputStream(xmltosave);
            WorkspaceUtil.createExternalFile(newcomputationFolder, computation.id+".xml", computation.operatorDescription, null, sis);
            sis.close();
            }catch(Exception e){
                AnalysisLogger.getLogger().debug("Dataspace->Failed creating ProvO XML file "+e.getLocalizedMessage());
                AnalysisLogger.getLogger().debug(e);
                e.printStackTrace();
            }
            List<String> scopes = new ArrayList<String>();
            scopes.add(config.getGcubeScope());
            ws.createGcubeItem(computation.id, computation.operatorDescription, scopes, computation.user, itemType, properties, newcomputationFolder.getId());

        }

        AnalysisLogger.getLogger().debug("Dataspace->finished uploading computation data");
    }

    /**
     * Full provenance write: connects to the user's Home Library workspace,
     * ensures the folder network, uploads output data and the computation
     * record, then deletes the locally generated temporary files.
     * (Input upload into the top-level folder is currently disabled.)
     *
     * @throws Exception on workspace or I/O failure
     */
    public void writeProvenance(ComputationData computation, List<StoredData> inputData, List<StoredData> outputData) throws Exception {
        AnalysisLogger.getLogger().debug("Dataspace->connecting to Workspace");
        HomeManagerFactory factory = HomeLibrary.getHomeManagerFactory();
        HomeManager manager = factory.getHomeManager();
        AnalysisLogger.getLogger().debug("Dataspace->getting user");
        User user = manager.createUser(computation.user);
        Home home = manager.getHome(user);
        AnalysisLogger.getLogger().debug("Dataspace->getting root folder");
        Workspace ws = home.getWorkspace();
        WorkspaceFolder root = ws.getRoot();
        AnalysisLogger.getLogger().debug("Dataspace->create folders network");
        createFoldersNetwork(ws, root);
        WorkspaceFolder dataminerItem = (WorkspaceFolder) root.find(dataminerFolder);
        AnalysisLogger.getLogger().debug("Dataspace->uploading input files");
        // uploadInputData(inputData, dataminerItem);
        AnalysisLogger.getLogger().debug("Dataspace->uploading output files");
        uploadOutputData(outputData, dataminerItem);
        AnalysisLogger.getLogger().debug("Dataspace->uploading computation files");
        uploadComputationData(computation, inputData, outputData, dataminerItem, ws);
        AnalysisLogger.getLogger().debug("Dataspace->provenance management finished");
        AnalysisLogger.getLogger().debug("Dataspace->deleting generated files");
        AbstractEcologicalEngineMapper.deleteGeneratedFiles(generatedFiles);
        AnalysisLogger.getLogger().debug("Dataspace->generated files deleted");
    }

    /**
     * Publishes a lightweight "running computation" gCube item (metadata only,
     * no data upload) under the Computations folder, replacing any previous
     * one for the same computation id. An error property is added when the
     * computation carries an exception message.
     *
     * @throws Exception on workspace failure
     */
    public void writeRunningComputationData() throws Exception {
        try {
            deleteRunningComputationData();
        } catch (Exception e) {
            AnalysisLogger.getLogger().debug("Dataspace->impossible to delete running computation");
        }
        // AnalysisLogger.getLogger().debug("Dataspace->updating computation status");
        // AnalysisLogger.getLogger().debug("Dataspace->connecting to Workspace");
        HomeManagerFactory factory = HomeLibrary.getHomeManagerFactory();
        HomeManager manager = factory.getHomeManager();
        // AnalysisLogger.getLogger().debug("Dataspace->getting user");
        User user = manager.createUser(computation.user);
        Home home = manager.getHome(user);
        // AnalysisLogger.getLogger().debug("Dataspace->getting root folder");
        Workspace ws = home.getWorkspace();
        WorkspaceFolder root = ws.getRoot();
        // AnalysisLogger.getLogger().debug("Dataspace->create folders network");
        createFoldersNetwork(ws, root);
        WorkspaceFolder dataminerFolderWS = (WorkspaceFolder) root.find(dataminerFolder);
        WorkspaceItem computationsFolderItem = dataminerFolderWS.find(computationsFolder);
        // AnalysisLogger.getLogger().debug("Dataspace->Creating computation item " + computation.id+" with status"+computation.status);
        String itemType = "COMPUTATION";

        // write a computation item for the computation
        LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
        properties.put(computation_id, computation.id);
        properties.put(operator_name, config.getAgent());
        properties.put(operator_description, computation.operatorDescription);
        properties.put(start_date, computation.startDate);
        properties.put(end_date, computation.endDate);
        properties.put(status, computation.status);
        properties.put(execution_platform, computation.infrastructure);
        if (computation.exception != null && computation.exception.length() > 0)
            properties.put(error, computation.exception);

        List<String> scopes = new ArrayList<String>();
        scopes.add(config.getGcubeScope());
        ws.createGcubeItem(computation.id, computation.operatorDescription, scopes, computation.user, itemType, properties, computationsFolderItem.getId());

        AnalysisLogger.getLogger().debug("Dataspace->finished uploading computation data");
    }

    /**
     * Removes the workspace item named after this computation's id from the
     * Computations folder. Throws if the item (or any folder on the path)
     * does not exist; callers treat that as "nothing to delete".
     *
     * @throws Exception on workspace failure or when the item is absent
     */
    public void deleteRunningComputationData() throws Exception {
        AnalysisLogger.getLogger().debug("Dataspace->deleting computation item");
        AnalysisLogger.getLogger().debug("Dataspace->connecting to Workspace");
        HomeManagerFactory factory = HomeLibrary.getHomeManagerFactory();
        HomeManager manager = factory.getHomeManager();
        AnalysisLogger.getLogger().debug("Dataspace->getting user");
        User user = manager.createUser(computation.user);
        Home home = manager.getHome(user);
        AnalysisLogger.getLogger().debug("Dataspace->getting root folder");
        Workspace ws = home.getWorkspace();
        WorkspaceFolder root = ws.getRoot();
        WorkspaceFolder dataminerFolderWS = (WorkspaceFolder) root.find(dataminerFolder);
        WorkspaceItem computationsFolderItem = dataminerFolderWS.find(computationsFolder);
        AnalysisLogger.getLogger().debug("Dataspace->removing computation data");
        ((WorkspaceFolder) computationsFolderItem).find(computation.id).remove();
        AnalysisLogger.getLogger().debug("Dataspace->finished removing computation data");
    }

}
|
|
@ -1,219 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace;
|
||||
|
||||
import java.io.StringReader;
|
||||
import java.io.StringWriter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.transform.OutputKeys;
|
||||
import javax.xml.transform.Transformer;
|
||||
import javax.xml.transform.TransformerFactory;
|
||||
import javax.xml.transform.dom.DOMSource;
|
||||
import javax.xml.transform.stream.StreamResult;
|
||||
|
||||
import org.xml.sax.InputSource;
|
||||
|
||||
public class ProvOGenerator {
|
||||
|
||||
// PROV-XML document wrapper; #DOCUMENT# is replaced with the activity markup.
static String document ="<prov:document xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" " +
        "xmlns:prov=\"http://www.w3.org/ns/prov#\" xmlns:d4s=\"http://d4science.org/#\">" +
        "#DOCUMENT#" +
        "</prov:document>";

// Activity (computation) template: placeholders #ID#, #START_TIME#, #END_TIME#,
// #PERSON#, and #ENTITIES# (nested entity markup).
static String activity = "<prov:activity prov:id=\"#ID#\">"+
        "<prov:startTime>#START_TIME#</prov:startTime>"+
        "<prov:endTime>#END_TIME#</prov:endTime>"+
        "<prov:type xsi:type=\"xsd:QName\">d4s:computation</prov:type>"+
        "<prov:softwareAgent prov:id=\"d4s:dataminer.d4science.org\" />"+
        "<prov:person prov:id=\"d4s:#PERSON#\" />"+
        "#ENTITIES#"+
        "</prov:activity>";

// Plain entity template: placeholders #ENTITY_NAME#, #ENTITY_VALUE#, #SUBENTITY#.
static String entity="<prov:entity prov:id=\"d4s:#ENTITY_NAME#\">"+
        //"<prov:type xsi:type=\"xsd:QName\"></prov:type>"+
        "<prov:value xsi:type=\"xsd:string\">#ENTITY_VALUE#</prov:value>"+
        "#SUBENTITY#"+
        "</prov:entity>";

// Timed entity template: same as `entity` plus a #TIME# placeholder in <prov:time>.
static String entityWithTime="<prov:entity prov:id=\"d4s:#ENTITY_NAME#\">"+
        //"<prov:type xsi:type=\"xsd:QName\"></prov:type>"+
        "<prov:value xsi:type=\"xsd:string\">#ENTITY_VALUE#</prov:value>"+
        "<prov:time>#TIME#</prov:time>"+
        "#SUBENTITY#"+
        "</prov:entity>";

// d4s-qualified type attribute element; placeholder #NAME#.
static String attribute = "<prov:type xsi:type=\"xsd:QName\">d4s:#NAME#</prov:type>";

// Cross-references to an already-declared activity/entity; placeholder #ID#.
static String referenceActivity = "<prov:activity prov:ref=\"d4s:#ID#\"/>";
static String referenceEntity = "<prov:entity prov:ref=\"d4s:#ID#\"/>";
|
||||
|
||||
/**
 * Ad-hoc smoke test: builds a sample DBSCAN computation with one input and
 * one output StoredData and prints the generated ProvO XML to stdout.
 */
public static void main(String[] args) {
    String name = "DBSCAN_1234";
    String startDate = "17/03/2016 11:32:22";
    String endDate = "17/03/2016 12:42:22";
    String operator = "DBSCAN";
    String operatorDescription = "example test";
    String infrastructure = "LOCAL";
    String status = "100";
    String id = name;
    String user = "gianpaolo.coro";
    ComputationData computation = new ComputationData(name, operator, operatorDescription, infrastructure, startDate, endDate, status, name,user);
    /*
     V public static String operator_description="operator_description";
    Vpublic static String data_description="data_description";
    public static String creation_date="creation_date";
    public static String start_date="start_date";
    public static String end_date="end_date";
    public static String status="status";
    public static String execution_type="execution_type";
    public static String error="error";
    public static String IO="IO";
    public static String operator="operator";
    */
    List<StoredData> inputData = new ArrayList<StoredData>();
    List<StoredData> outputData = new ArrayList<StoredData>();
    StoredData in = new StoredData("inputT1","descrT1", "inputT1", DataProvenance.IMPORTED, "15/03/2016 11:32:22", operator, id, StoredType.STRING, "hello");
    inputData.add(in);
    StoredData out = new StoredData("outputT1","descrT1", "outputT1", DataProvenance.IMPORTED, "16/03/2016 11:32:22", operator, id, StoredType.STRING, "hellooutput");
    outputData.add(out);
    //System.out.println(dataToEntity(in));
    System.out.println(toProvO(computation, inputData, outputData));
}
|
||||
|
||||
public static String getDataIOAttribute(String IO){
|
||||
return attribute(IO);
|
||||
}
|
||||
public static String getDataTypeAttribute(String type){
|
||||
return attribute(type);
|
||||
}
|
||||
public static String getDataDescriptionEntity(String datadescription){
|
||||
return entity(DataspaceManager.data_description, datadescription);
|
||||
}
|
||||
|
||||
public static String getOperatorRefEntity(String operator_id){
|
||||
return refentity(operator_id);
|
||||
}
|
||||
|
||||
public static String getComputationRefEntity(String computation_id){
|
||||
return refactivity(computation_id);
|
||||
}
|
||||
|
||||
public static String dataToEntity(StoredData data){
|
||||
String io = getDataIOAttribute(data.provenance.name());
|
||||
String type = getDataTypeAttribute(data.type.name());
|
||||
String description = getDataDescriptionEntity(data.description);
|
||||
String operator = getOperatorRefEntity(data.operator);
|
||||
String computation = getComputationRefEntity(data.computationId);
|
||||
String subentity = computation+operator+description+io+type;
|
||||
String dataEntity = completeEntityWithTime(data.id, data.payload, data.creationDate, subentity);
|
||||
return dataEntity;
|
||||
}
|
||||
|
||||
public static String getStatusEntity(String status){
|
||||
return entity(DataspaceManager.status, status);
|
||||
}
|
||||
|
||||
public static String getExecutionPlatformEntity(String executionPlatform){
|
||||
return entity(DataspaceManager.execution_platform, executionPlatform);
|
||||
}
|
||||
|
||||
public static String getOperatorDescriptionEntity(String description){
|
||||
return entity(DataspaceManager.operator_description, description);
|
||||
}
|
||||
|
||||
public static String getOperatorEntity(String operator){
|
||||
return entity(DataspaceManager.operator, operator);
|
||||
}
|
||||
|
||||
public static String computationToAction(ComputationData computation,String subEntities){
|
||||
String status = getStatusEntity(computation.status);
|
||||
String description = getOperatorDescriptionEntity(computation.operatorDescription);
|
||||
String operator = getOperatorEntity(computation.operator);
|
||||
String subents =operator+description+status+subEntities;
|
||||
String activity = completeActivity(computation.id,computation.startDate,computation.endDate,computation.user,subents);
|
||||
|
||||
return activity;
|
||||
}
|
||||
|
||||
public static String toProvO(ComputationData computation, List<StoredData> input, List<StoredData> output){
|
||||
StringBuffer sb = new StringBuffer();
|
||||
for (StoredData in:input){
|
||||
sb.append(dataToEntity(in));
|
||||
}
|
||||
for (StoredData out:output){
|
||||
sb.append(dataToEntity(out));
|
||||
}
|
||||
|
||||
String action = computationToAction(computation, sb.toString());
|
||||
String documentString = document.replace("#DOCUMENT#", action);
|
||||
documentString = formatXML(documentString);
|
||||
|
||||
return documentString;
|
||||
|
||||
}
|
||||
|
||||
|
||||
public static String entity(String name, String value){
|
||||
return entity.replace("#ENTITY_NAME#", name).replace("#ENTITY_VALUE#", value).replace("#SUBENTITY#","");
|
||||
}
|
||||
public static String refentity(String id){
|
||||
return referenceEntity.replace("#ID#", id);
|
||||
}
|
||||
public static String refactivity(String id){
|
||||
return referenceActivity.replace("#ID#", id);
|
||||
}
|
||||
public static String attribute(String name){
|
||||
return attribute.replace("#NAME#", name);
|
||||
}
|
||||
|
||||
public static String entityWithTime(String name, String value,String time){
|
||||
return entity.replace("#ENTITY_NAME#", name).replace("#ENTITY_VALUE#", value).replace("#TIME#", time).replace("#SUBENTITY#","");
|
||||
}
|
||||
|
||||
public static String completeEntityWithTime(String name, String value,String time,String subEntity){
|
||||
return entity.replace("#ENTITY_NAME#", name).replace("#ENTITY_VALUE#", value).replace("#TIME#", time).replace("#SUBENTITY#",subEntity);
|
||||
}
|
||||
|
||||
public static String completeActivity(String id, String startTime,String endTime,String person, String subEntity){
|
||||
return activity.replace("#ID#", id).replace("#PERSON#", person).replace("#START_TIME#", startTime).replace("#END_TIME#", endTime).replace("#ENTITIES#",subEntity);
|
||||
}
|
||||
|
||||
|
||||
public static String formatXML(String input)
|
||||
{
|
||||
try
|
||||
{
|
||||
Transformer transformer = TransformerFactory.newInstance()
|
||||
.newTransformer();
|
||||
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
|
||||
transformer.setOutputProperty(
|
||||
"{http://xml.apache.org/xslt}indent-amount", "3");
|
||||
|
||||
StreamResult result = new StreamResult(new StringWriter());
|
||||
DOMSource source = new DOMSource(parseXml(input));
|
||||
transformer.transform(source, result);
|
||||
return result.getWriter().toString();
|
||||
} catch (Exception e)
|
||||
{
|
||||
e.printStackTrace();
|
||||
return input;
|
||||
}
|
||||
}
|
||||
|
||||
private static org.w3c.dom.Document parseXml(String in)
|
||||
{
|
||||
try
|
||||
{
|
||||
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
|
||||
DocumentBuilder db = dbf.newDocumentBuilder();
|
||||
InputSource is = new InputSource(new StringReader(in));
|
||||
return db.parse(is);
|
||||
} catch (Exception e)
|
||||
{
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,26 +0,0 @@
|
|||
package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.dataspace;
|
||||
|
||||
public class StoredData {
|
||||
|
||||
public StoredData(String name, String description, String id, DataProvenance provenance, String creationDate, String operator, String computationId, StoredType type, String payload) {
|
||||
super();
|
||||
this.name = name;
|
||||
this.id = id;
|
||||
this.description = description;
|
||||
this.provenance = provenance;
|
||||
this.creationDate = creationDate;
|
||||
this.operator = operator;
|
||||
this.computationId = computationId;
|
||||
this.type = type;
|
||||
this.payload=payload;
|
||||
}
|
||||
String name;
|
||||
String description;
|
||||
String id;
|
||||
DataProvenance provenance;
|
||||
String creationDate;
|
||||
String operator;
|
||||
String computationId;
|
||||
StoredType type;
|
||||
String payload;
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue