From 645c869f0d0d2465641c3b502f9e109b864be71a Mon Sep 17 00:00:00 2001
From: Gianpaolo Coro
Date: Thu, 26 Jul 2012 10:23:28 +0000
Subject: [PATCH] git-svn-id:
 https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine@56820
 82a268e6-3cf1-43bd-a215-b396298e98cf

---
 cfg/generators.properties                     |   1 -
 ecologicalEngine.jardesc                      |   6 +-
 .../ecoengine/clustering/DBScan.java          | 122 ++++++++
 .../configuration/AlgorithmConfiguration.java |   9 +
 .../ecoengine/datatypes/DatabaseType.java     |  32 +++
 .../ecoengine/datatypes/InputTable.java       |  41 ++++
 .../ecoengine/datatypes/OutputTable.java      |  33 +++
 .../ecoengine/datatypes/PrimitiveType.java    |  50 ++++
 .../ecoengine/datatypes/ServiceType.java      |  34 +++
 .../ecoengine/datatypes/StatisticalType.java  |  62 +++++
 .../enumtypes/DatabaseParameters.java         |  11 +
 .../datatypes/enumtypes/PrimitiveTypes.java   |  12 +
 .../enumtypes/ServiceParameters.java          |  11 +
 .../datatypes/enumtypes/TableTemplates.java   |  12 +
 .../evaluation/DiscrepancyAnalysis.java       |  85 +++---
 .../DistributionQualityAnalysis.java          |  82 ++++---
 .../evaluation/HabitatRepresentativeness.java |  67 ++++--
 .../bioclimate/BioClimateAnalysis.java        | 214 +++++++++++++-----
 .../bioclimate/InterpolateTables.java         |  10 +-
 .../ecoengine/interfaces/Clusterer.java       |  31 +++
 .../interfaces/ComputationalAgent.java        |  13 +-
 .../ecoengine/interfaces/DataAnalysis.java    |  19 +-
 .../ecoengine/interfaces/Evaluator.java       |   3 +-
 .../ecoengine/interfaces/Generator.java       |   1 +
 .../ecoengine/interfaces/Model.java           |  15 +-
 .../SpatialProbabilityDistribution.java       |  11 +-
 .../SpatialProbabilityDistributionNode.java   |  66 +++++-
 .../ecoengine/modeling/SimpleModeler.java     |  15 +-
 .../ecoengine/models/ModelAquamapsNN.java     |  74 +++---
 .../ecoengine/models/ModelAquamapsNNNS.java   |  72 +++---
 .../ecoengine/models/ModelHSPEN.java          |  83 ++++---
 .../processing/LocalSimpleSplitGenerator.java |  18 +-
 .../processing/LocalSplitGenerator.java       |  19 +-
 .../processing/RainyCloudGenerator.java       |  32 ++-
 .../factories/EvaluatorsFactory.java          |  12 +-
 .../factories/GeneratorsFactory.java          |   7 +-
 .../processing/factories/ModelersFactory.java |   7 +-
 .../factories/ProcessorsFactory.java          |   6 +-
 .../spatialdistributions/AquamapsNN.java      |  32 ++-
 .../spatialdistributions/AquamapsNNNS.java    |  32 ++-
 .../AquamapsNNSuitable.java                   |  32 ++-
 .../AquamapsSuitable.java                     |  76 +++++--
 .../spatialdistributions/DummyAlgorithm.java  |  32 ++-
 .../spatialdistributions/TestAlgorithm.java   |  26 +--
 .../ecoengine/test/TestsMetaInfo.java         |   4 +-
 .../RegressionComplexGeneration.java          |   4 +-
 .../test/regressions/TestEvaluation.java      |   5 +-
 .../TestHSPECBioClimateAnalysisDev.java       |  61 +++++
 .../TestHSPECBioClimateAnalysisProd.java      |  57 +++++
 .../ecoengine/user/EvaluatorT.java            |   6 +-
 .../ecoengine/utils/DatabaseFactory.java      |  19 +-
 .../utils/TrainingSetsGenerator.java          |  15 +-
 .../dataanalysis/ecoengine/utils/VARTYPE.java |  22 --
 .../ecoengine/utils/VarCouple.java            |  26 ---
 54 files changed, 1297 insertions(+), 550 deletions(-)
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/clustering/DBScan.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/DatabaseType.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/InputTable.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/OutputTable.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/PrimitiveType.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/ServiceType.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/StatisticalType.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/DatabaseParameters.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/PrimitiveTypes.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/ServiceParameters.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/TableTemplates.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Clusterer.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisDev.java
 create mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisProd.java
 delete mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/utils/VARTYPE.java
 delete mode 100644 src/main/java/org/gcube/dataanalysis/ecoengine/utils/VarCouple.java

diff --git a/cfg/generators.properties b/cfg/generators.properties
index 0752409..4efd8af 100644
--- a/cfg/generators.properties
+++ b/cfg/generators.properties
@@ -1,3 +1,2 @@
 LOCAL_WITH_DATABASE=org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator
 SIMPLE_LOCAL=org.gcube.dataanalysis.ecoengine.processing.LocalSimpleSplitGenerator
-REMOTE_RAINYCLOUD=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator
\ No newline at end of file
diff --git a/ecologicalEngine.jardesc b/ecologicalEngine.jardesc
index c8ea568..6d672da 100644
--- a/ecologicalEngine.jardesc
+++ b/ecologicalEngine.jardesc
@@ -1,6 +1,6 @@
-
+
-
+
@@ -11,6 +11,6 @@
-
+
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/clustering/DBScan.java b/src/main/java/org/gcube/dataanalysis/ecoengine/clustering/DBScan.java
new file mode 100644
index 0000000..d974cfc
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/clustering/DBScan.java
@@ -0,0 +1,122 @@
+package org.gcube.dataanalysis.ecoengine.clustering;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
+import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
+import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
+import org.gcube.dataanalysis.ecoengine.utils.Transformations;
+
+import com.rapidminer.example.ExampleSet;
+import com.rapidminer.operator.IOContainer;
+import com.rapidminer.operator.IOObject;
+import com.rapidminer.operator.clustering.Cluster;
+import com.rapidminer.operator.clustering.ClusterModel;
+import com.rapidminer.tools.OperatorService;
+
+public class DBScan implements Clusterer {
+
+    AlgorithmConfiguration config;
+    String epsilon;
+    String minPoints;
+    ExampleSet points;
+    ArrayList<ArrayList<String>> rows;
+
+    public static void main(String[] args) {
+        // TODO Auto-generated method stub
+    }
+
+    @Override
+    public ALG_PROPS[] getSupportedAlgorithms() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public INFRASTRUCTURE getInfrastructure() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public void init() throws Exception {
+        if (config != null)
+            config.initRapidMiner();
+    }
+
+    @Override
+    public void setConfiguration(AlgorithmConfiguration config) {
+        // TODO Auto-generated method stub
+    }
+
+    @Override
+    public void shutdown() {
+        // TODO Auto-generated method stub
+    }
+
+    protected void getSamples(double[][] sampleVectors) throws Exception {
+        points = Transformations.matrix2ExampleSet(sampleVectors);
+    }
+
+    @Override
+    public void cluster() throws Exception {
+        rows = new ArrayList<ArrayList<String>>();
+        com.rapidminer.operator.clustering.clusterer.DBScan clusterer = (com.rapidminer.operator.clustering.clusterer.DBScan) OperatorService.createOperator("DBScanClustering");
+        clusterer.setParameter("local_random_seed", "-1");
+        clusterer.setParameter("epsilon", epsilon);
+        clusterer.setParameter("min_points", minPoints);
+        clusterer.setParameter("add_cluster_attribute", "true");
+        clusterer.setParameter("keep_example_set", "true");
+
+        IOContainer innerInput = new IOContainer(points);
+        IOContainer output = clusterer.apply(innerInput);
+
+        IOObject[] outputvector = output.getIOObjects();
+
+        ClusterModel innermodel = (ClusterModel) outputvector[1];
+        for (Cluster c : innermodel.getClusters()) {
+            c.getClusterId();
+        }
+    }
+
+    @Override
+    public float getStatus() {
+        // TODO Auto-generated method stub
+        return 0;
+    }
+
+    @Override
+    public StatisticalType getOutput() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public List<StatisticalType> getInputParameters() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public String getDescription() {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+}
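For context, the new DBScan class is still a skeleton: setConfiguration() and the metadata methods are stubs, so driving it end-to-end means touching its package-visible fields directly. A minimal smoke-test sketch, assuming a same-package driver class, an existing setConfigPath() on AlgorithmConfiguration, and made-up parameter values and sample data:

    package org.gcube.dataanalysis.ecoengine.clustering;

    import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;

    // Hypothetical driver, not part of the patch: same package, so it can reach
    // the package-private fields that setConfiguration() does not wire up yet.
    public class DBScanSmokeTest extends DBScan {

        public static void main(String[] args) throws Exception {
            DBScanSmokeTest dbscan = new DBScanSmokeTest();

            AlgorithmConfiguration cfg = new AlgorithmConfiguration();
            cfg.setConfigPath("./cfg/");   // assumed location of the RapidMiner configuration
            dbscan.config = cfg;

            dbscan.epsilon = "0.2";        // neighborhood radius (illustrative)
            dbscan.minPoints = "2";        // minimum points per dense region (illustrative)

            dbscan.init();                 // boots RapidMiner through the configuration

            // two well-separated groups; the parameters above should split them
            dbscan.getSamples(new double[][] { { 0.0, 0.1 }, { 0.1, 0.2 }, { 5.0, 5.1 }, { 5.1, 5.2 } });
            dbscan.cluster();              // runs the RapidMiner "DBScanClustering" operator
        }
    }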
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/configuration/AlgorithmConfiguration.java b/src/main/java/org/gcube/dataanalysis/ecoengine/configuration/AlgorithmConfiguration.java
index b047047..c104737 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/configuration/AlgorithmConfiguration.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/configuration/AlgorithmConfiguration.java
@@ -65,6 +65,7 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement
 	private String cachePath;
 	private String persistencePath;
 	private String distributionTable;
+	private String tableSpace;
 	private Boolean createTable = false;
 	private Boolean useDB = true;
@@ -202,6 +203,14 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement
 	public void setEndpoints(List endpoints) {
 		this.endpoints = endpoints;
 	}
+
+	public String getTableSpace() {
+		return tableSpace;
+	}
+
+	public void setTableSpace(String tableSpace) {
+		this.tableSpace = tableSpace;
+	}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/DatabaseType.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/DatabaseType.java
new file mode 100644
index 0000000..a8e8bdb
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/DatabaseType.java
@@ -0,0 +1,32 @@
+package org.gcube.dataanalysis.ecoengine.datatypes;
+
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
+
+public class DatabaseType extends StatisticalType {
+
+	public DatabaseType(DatabaseParameters databaseParameter, String name, String description, String defaultValue, boolean optional) {
+		super(name, description, defaultValue, optional);
+		this.databaseParameter = databaseParameter;
+	}
+
+	public DatabaseType(DatabaseParameters databaseParameter, String name, String description, String defaultValue) {
+		super(name, description, defaultValue);
+		this.databaseParameter = databaseParameter;
+	}
+
+	public DatabaseType(DatabaseParameters databaseParameter, String name, String description) {
+		super(name, description);
+		this.databaseParameter = databaseParameter;
+	}
+
+	protected DatabaseParameters databaseParameter;
+
+	public DatabaseParameters getDatabaseParameter() {
+		return databaseParameter;
+	}
+
+	public void setDatabaseParameter(DatabaseParameters databaseParameters) {
+		this.databaseParameter = databaseParameters;
+	}
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/InputTable.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/InputTable.java
new file mode 100644
index 0000000..6fab213
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/InputTable.java
@@ -0,0 +1,41 @@
+package org.gcube.dataanalysis.ecoengine.datatypes;
+
+import java.util.List;
+
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
+
+public class InputTable extends StatisticalType {
+
+	protected List<TableTemplates> templateNames;
+
+	public InputTable(List<TableTemplates> templateName, String name, String description, String defaultValue, boolean optional) {
+		super(name, description, defaultValue, optional);
+		this.templateNames = templateName;
+	}
+
+	public InputTable(List<TableTemplates> templateName, String name, String description, String defaultValue) {
+		super(name, description, defaultValue);
+		this.templateNames = templateName;
+	}
+
+	public InputTable(List<TableTemplates> templateName, String name, String description) {
+		super(name, description);
+		this.templateNames = templateName;
+	}
+
+	public List<TableTemplates> getTemplateNames() {
+		return templateNames;
+	}
+
+	public void setTemplateNames(List<TableTemplates> templateName) {
+		this.templateNames = templateName;
+	}
+
+	public String getTableName() {
+		return super.name;
+	}
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/OutputTable.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/OutputTable.java
new file mode 100644
index 0000000..a82998b
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/OutputTable.java
@@ -0,0 +1,33 @@
+package org.gcube.dataanalysis.ecoengine.datatypes;
+
+import java.util.List;
+import java.util.UUID;
+
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
+
+public class OutputTable extends InputTable {
+
+	public OutputTable(List<TableTemplates> templateName, String name, String tableName, String description, String defaultValue, boolean optional) {
+		super(templateName, name, description, defaultValue, optional);
+		this.tableName = tableName;
+	}
+
+	public OutputTable(List<TableTemplates> templateName, String name, String tableName, String description, String defaultValue) {
+		super(templateName, name, description, defaultValue);
+		this.tableName = tableName;
+	}
+
+	public OutputTable(List<TableTemplates> templateName, String name, String tableName, String description) {
+		super(templateName, name, description);
+		this.tableName = tableName;
+	}
+
+	protected String tableName;
+
+	public String getTableName() {
+		return tableName;
+	}
+
+}
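To make the intent of the two table types concrete, here is a small sketch of how an algorithm might declare them; the parameter, template, and table names are illustrative, and only classes introduced by this patch are used:

    import java.util.ArrayList;
    import java.util.List;

    import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
    import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
    import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;

    public class TableTypesExample {
        public static void main(String[] args) {
            List<TableTemplates> templates = new ArrayList<TableTemplates>();
            templates.add(TableTemplates.HSPEC);

            // an input parameter: the caller points it at an existing HSPEC-shaped table
            InputTable in = new InputTable(templates, "InputDistribution", "An HSPEC table to analyze");

            // an output parameter: also carries the concrete name of the table to create
            OutputTable out = new OutputTable(templates, "Projection", "hspec_projected", "The projected distribution");

            System.out.println(in.getTableName());   // parameter name: InputDistribution
            System.out.println(out.getTableName());  // physical table: hspec_projected
        }
    }

Note the asymmetry: InputTable.getTableName() returns the parameter name (the user supplies the actual table), while OutputTable overrides it to return the physical table name the algorithm will materialize.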
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/PrimitiveType.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/PrimitiveType.java
new file mode 100644
index 0000000..849db77
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/PrimitiveType.java
@@ -0,0 +1,50 @@
+package org.gcube.dataanalysis.ecoengine.datatypes;
+
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
+
+public class PrimitiveType extends StatisticalType {
+
+	public PrimitiveType(String className, Object content, PrimitiveTypes type, String name, String description, String defaultValue, boolean optional) {
+		super(name, description, defaultValue, optional);
+		this.className = className;
+		this.content = content;
+		this.type = type;
+	}
+
+	public PrimitiveType(String className, Object content, PrimitiveTypes type, String name, String description, String defaultValue) {
+		super(name, description, defaultValue);
+		this.className = className;
+		this.content = content;
+		this.type = type;
+	}
+
+	public PrimitiveType(String className, Object content, PrimitiveTypes type, String name, String description) {
+		super(name, description);
+		this.className = className;
+		this.content = content;
+		this.type = type;
+	}
+
+	protected String className;
+
+	public String getClassName() {
+		return className;
+	}
+
+	public void setClassName(String className) {
+		this.className = className;
+	}
+
+	protected Object content;
+
+	public Object getContent() {
+		return content;
+	}
+
+	public void setContent(Object content) {
+		this.content = content;
+	}
+
+	protected PrimitiveTypes type;
+
+	public PrimitiveTypes getType() {
+		return type;
+	}
+
+	public void setType(PrimitiveTypes type) {
+		this.type = type;
+	}
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/ServiceType.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/ServiceType.java
new file mode 100644
index 0000000..ec09d20
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/ServiceType.java
@@ -0,0 +1,34 @@
+package org.gcube.dataanalysis.ecoengine.datatypes;
+
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
+
+public class ServiceType extends StatisticalType {
+
+	public ServiceType(ServiceParameters serviceParameter, String name, String description, String defaultValue, boolean optional) {
+		super(name, description, defaultValue, optional);
+		this.serviceParameter = serviceParameter;
+	}
+
+	public ServiceType(ServiceParameters serviceParameter, String name, String description, String defaultValue) {
+		super(name, description, defaultValue);
+		this.serviceParameter = serviceParameter;
+	}
+
+	public ServiceType(ServiceParameters serviceParameter, String name, String description) {
+		super(name, description);
+		this.serviceParameter = serviceParameter;
+	}
+
+	protected ServiceParameters serviceParameter;
+
+	public ServiceParameters getServiceParameter() {
+		return serviceParameter;
+	}
+
+	public void setServiceParameter(ServiceParameters serviceParameter) {
+		this.serviceParameter = serviceParameter;
+	}
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/StatisticalType.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/StatisticalType.java
new file mode 100644
index 0000000..4e11471
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/StatisticalType.java
@@ -0,0 +1,62 @@
+package org.gcube.dataanalysis.ecoengine.datatypes;
+
+public class StatisticalType {
+
+	protected String defaultValue;
+	protected String description;
+	protected String name;
+
+	protected boolean optional;
+
+	public StatisticalType(String name, String description, String defaultValue, boolean optional) {
+		this.name = name;
+		this.description = description;
+		this.defaultValue = defaultValue;
+		this.optional = optional;
+	}
+
+	public StatisticalType(String name, String description, String defaultValue) {
+		this.name = name;
+		this.description = description;
+		this.defaultValue = defaultValue;
+		this.optional = true;
+	}
+
+	public StatisticalType(String name, String description) {
+		this.name = name;
+		this.description = description;
+		this.defaultValue = "";
+		this.optional = true;
+	}
+
+	public String getDefaultValue() {
+		return defaultValue;
+	}
+
+	public void setDefaultValue(String defaultValue) {
+		this.defaultValue = defaultValue;
+	}
+
+	public String getDescription() {
+		return description;
+	}
+
+	public void setDescription(String description) {
+		this.description = description;
+	}
+
+	public boolean isOptional() {
+		return optional;
+	}
+
+	public void setOptional(boolean optional) {
+		this.optional = optional;
+	}
+
+	public String getName() {
+		return name;
+	}
+
+	public void setName(String name) {
+		this.name = name;
+	}
+
+	public String toString() {
+		return name + ":" + description + ":" + defaultValue + ":" + optional;
+	}
+}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/DatabaseParameters.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/DatabaseParameters.java
new file mode 100644
index 0000000..42e73ae
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/DatabaseParameters.java
@@ -0,0 +1,11 @@
+package org.gcube.dataanalysis.ecoengine.datatypes.enumtypes;
+
+public enum DatabaseParameters {
+
+	DATABASEUSERNAME,
+	DATABASEPASSWORD,
+	DATABASEURL,
+	DATABASEDRIVER,
+	DATABASEDIALECT
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/PrimitiveTypes.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/PrimitiveTypes.java
new file mode 100644
index 0000000..4c9cbca
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/PrimitiveTypes.java
@@ -0,0 +1,12 @@
+package org.gcube.dataanalysis.ecoengine.datatypes.enumtypes;
+
+public enum PrimitiveTypes {
+	STRING,
+	NUMBER,
+	CONSTANT,
+	RANDOM,
+	FILE,
+	MAP,
+	BOOLEAN
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/ServiceParameters.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/ServiceParameters.java
new file mode 100644
index 0000000..483b588
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/ServiceParameters.java
@@ -0,0 +1,11 @@
+package org.gcube.dataanalysis.ecoengine.datatypes.enumtypes;
+
+public enum ServiceParameters {
+
+	EPR_LIST,
+	INFRA,
+	SERVICE,
+	USERNAME,
+	RANDOMSTRING
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/TableTemplates.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/TableTemplates.java
new file mode 100644
index 0000000..5e368d9
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/TableTemplates.java
@@ -0,0 +1,12 @@
+package org.gcube.dataanalysis.ecoengine.datatypes.enumtypes;
+
+public enum TableTemplates {
+
+	HSPEN,
+	HCAF,
+	HSPEC,
+	OCCURRENCE,
+	MINMAXLAT,
+	TRAININGSET,
+	TESTSET
+}
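Taken together, the datatypes and enumtypes packages replace the old VarCouple/VARTYPE pairs with a self-describing parameter list. A sketch of a generic consumer, e.g. a UI or a service wrapper, using only the classes added above (the class and method names of the consumer itself are hypothetical):

    import java.util.List;

    import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
    import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
    import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
    import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
    import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;

    public class ParameterInspector {
        // prints one human-readable line per declared parameter; a real client
        // would use the same dispatch to render input widgets or fill defaults
        public static void describe(List<StatisticalType> parameters) {
            for (StatisticalType st : parameters) {
                if (st instanceof InputTable)           // also matches OutputTable
                    System.out.println("table: " + st.getName() + " templates=" + ((InputTable) st).getTemplateNames());
                else if (st instanceof DatabaseType)
                    System.out.println("db param: " + ((DatabaseType) st).getDatabaseParameter());
                else if (st instanceof ServiceType)
                    System.out.println("service param: " + ((ServiceType) st).getServiceParameter());
                else if (st instanceof PrimitiveType)
                    System.out.println("primitive " + ((PrimitiveType) st).getType() + ": " + st);
                else
                    System.out.println(st.toString());
            }
        }
    }

The evaluators below are the first classes rewritten to this contract.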
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/DiscrepancyAnalysis.java b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/DiscrepancyAnalysis.java
index 91ffd55..7580eeb 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/DiscrepancyAnalysis.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/DiscrepancyAnalysis.java
@@ -3,16 +3,22 @@ package org.gcube.dataanalysis.ecoengine.evaluation;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
+import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
+import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
 import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
 import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
 import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
 import org.gcube.dataanalysis.ecoengine.utils.Operations;
-import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
-import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
 import org.hibernate.SessionFactory;
 
 public class DiscrepancyAnalysis extends DataAnalysis {
@@ -42,41 +48,42 @@ public class DiscrepancyAnalysis extends DataAnalysis {
 	private HashMap<String, String> output;
 
 	@Override
-	public HashMap<String, VarCouple> getInputParameters() {
-
-		HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
-		parameters.put("FirstTable", new VarCouple(VARTYPE.STRING, "hspec1"));
-		parameters.put("SecondTable", new VarCouple(VARTYPE.STRING, "hspec2"));
-		parameters.put("FirstTableCsquareColumn", new VarCouple(VARTYPE.STRING, "csquare"));
-		parameters.put("SecondTableCsquareColumn", new VarCouple(VARTYPE.STRING, "csquarecode"));
-		parameters.put("FirstTableProbabilityColumn", new VarCouple(VARTYPE.STRING, "csquare"));
-		parameters.put("SecondTableProbabilityColumn", new VarCouple(VARTYPE.STRING, "csquarecode"));
-		parameters.put("ComparisonThreshold", new VarCouple(VARTYPE.STRING, "0.1"));
-		parameters.put("MaxSamples", new VarCouple(VARTYPE.STRING, "10000"));
-		parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
-		parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, ""));
-		parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
-		parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, ""));
-
+	public List<StatisticalType> getInputParameters() {
+		List<StatisticalType> parameters = new ArrayList<StatisticalType>();
+		List<TableTemplates> templates = new ArrayList<TableTemplates>();
+		templates.add(TableTemplates.HSPEC);
+		templates.add(TableTemplates.TRAININGSET);
+		templates.add(TableTemplates.TESTSET);
+		InputTable p1 = new InputTable(templates, "FirstTable", "First Table");
+		InputTable p2 = new InputTable(templates, "SecondTable", "Second Table");
+		PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FirstTableCsquareColumn", "the csquares column name in the first table", "csquarecode");
+		PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SecondTableCsquareColumn", "the csquares column name in the second table", "csquarecode");
+		PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FirstTableProbabilityColumn", "the probability column in the first table", "probability");
+		PrimitiveType p6 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.NUMBER, "ComparisonThreshold", "the comparison threshold", "0.1");
+		PrimitiveType p7 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.NUMBER, "MaxSamples", "the maximum number of samples", "10000");
+		DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
+		DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
+		DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
+		DatabaseType p11 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
+		DatabaseType p12 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
+
+		parameters.add(p1);
+		parameters.add(p2);
+		parameters.add(p3);
+		parameters.add(p4);
+		parameters.add(p5);
+		parameters.add(p6);
+		parameters.add(p7);
+		parameters.add(p8);
+		parameters.add(p9);
+		parameters.add(p10);
+		parameters.add(p11);
+		parameters.add(p12);
+
 		return parameters;
 	}
 
-	@Override
-	public List<String> getOutputParameters() {
-
-		List<String> outputs = new ArrayList<String>();
-
-		outputs.add("MEAN");
-		outputs.add("VARIANCE");
-		outputs.add("NUMBER_OF_ERRORS");
-		outputs.add("NUMBER_OF_COMPARISONS");
-		outputs.add("ACCURACY");
-		outputs.add("MAXIMUM_ERROR");
-		outputs.add("MAXIMUM_ERROR_POINT");
-
-		return outputs;
-
-	}
+
 	@Override
 	public void init(AlgorithmConfiguration config) throws Exception {
@@ -200,12 +207,8 @@ public class DiscrepancyAnalysis extends DataAnalysis {
 	}
 
 	@Override
-	public VARTYPE getContentType() {
-		return VARTYPE.MAP;
-	}
-
-	@Override
-	public Object getContent() {
-		return output;
+	public StatisticalType getOutput() {
+		PrimitiveType p = new PrimitiveType(Map.class.getName(), output, PrimitiveTypes.MAP, "ErrorsAnalysis", "Analysis of the discrepancies");
+		return p;
 	}
 }
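The getContentType()/getContent() pair is gone: callers now receive a single StatisticalType. A sketch of reading the discrepancy result under the new contract; the map keys are those the class computes (MEAN, ACCURACY, etc., from the removed getOutputParameters list), and the cast mirrors what BioClimateAnalysis does later in this patch:

    import java.util.HashMap;

    import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
    import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;

    public class DiscrepancyOutputExample {
        // 'analysis' is assumed to be an initialized DiscrepancyAnalysis that has already run
        @SuppressWarnings("unchecked")
        public static double readMean(DiscrepancyAnalysis analysis) {
            PrimitiveType result = (PrimitiveType) analysis.getOutput();
            HashMap<String, String> map = (HashMap<String, String>) result.getContent();
            return Double.parseDouble(map.get("MEAN"));
        }
    }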
parameters.put("NegativeCasesTableKeyColumn", new VarCouple(VARTYPE.STRING, "csquarecode")); - - parameters.put("DistributionTable", new VarCouple(VARTYPE.STRING, "csquare")); - parameters.put("DistributionTableKeyColumn", new VarCouple(VARTYPE.STRING, "csquarecode")); - parameters.put("DistributionTableProbabilityColumn", new VarCouple(VARTYPE.STRING, "csquarecode")); - - parameters.put("PositiveThreshold", new VarCouple(VARTYPE.STRING, "0.8")); - parameters.put("NegativeThreshold", new VarCouple(VARTYPE.STRING, "0.3")); - - parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, "")); - parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, "")); - parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, "")); - parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, "")); + public List getInputParameters() { + List parameters = new ArrayList(); + List templates = new ArrayList(); + templates.add(TableTemplates.HSPEC); + templates.add(TableTemplates.TRAININGSET); + templates.add(TableTemplates.TESTSET); + + List templatesOccurrences = new ArrayList(); + templatesOccurrences.add(TableTemplates.OCCURRENCE); + templatesOccurrences.add(TableTemplates.TRAININGSET); + templatesOccurrences.add(TableTemplates.TESTSET); + + InputTable p1 = new InputTable(templatesOccurrences,"PositiveCasesTable","A Table containing positive cases"); + InputTable p2 = new InputTable(templatesOccurrences,"NegativeCasesTable","A Table containing negative cases"); + PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "PositiveCasesTableKeyColumn","Positive Cases Table Key Column","csquarecode"); + PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "NegativeCasesTableKeyColumn","Negative Cases Table Key Column","csquarecode"); + InputTable p5 = new InputTable(templates,"DistributionTable","A probability distribution table"); + PrimitiveType p6 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "DistributionTableKeyColumn","Distribution Table Key Column","csquarecode"); + PrimitiveType p7 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "DistributionTableProbabilityColumn","Distribution Table Probability Column","csquarecode"); + PrimitiveType p8 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "PositiveThreshold","Positive acceptance threshold","0.8"); + PrimitiveType p9 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "NegativeThreshold","Negative acceptance threshold","0.3"); + DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name"); + DatabaseType p11 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password"); + DatabaseType p12 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver"); + DatabaseType p13 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url"); + DatabaseType p14 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect"); + parameters.add(p1); + parameters.add(p2); + parameters.add(p3); + parameters.add(p4); + parameters.add(p5); + parameters.add(p6); + parameters.add(p7); + parameters.add(p8); + parameters.add(p9); + parameters.add(p10); + parameters.add(p11); + parameters.add(p12); + parameters.add(p13); + parameters.add(p14); + return parameters; } @@ -324,13 +350,9 @@ public class DistributionQualityAnalysis 
extends DataAnalysis { @Override - public VARTYPE getContentType() { - return VARTYPE.MAP; - } - - @Override - public Object getContent() { - return output; + public StatisticalType getOutput() { + PrimitiveType p = new PrimitiveType(Map.class.getName(), output, PrimitiveTypes.MAP, "AnalysisResult","Analysis of the probability distribution quality"); + return p; } } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/HabitatRepresentativeness.java b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/HabitatRepresentativeness.java index 2a96d28..2a9fd22 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/HabitatRepresentativeness.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/HabitatRepresentativeness.java @@ -3,17 +3,23 @@ package org.gcube.dataanalysis.ecoengine.evaluation; import java.util.ArrayList; import java.util.HashMap; import java.util.List; +import java.util.Map; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; +import org.gcube.dataanalysis.ecoengine.datatypes.InputTable; +import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis; import org.gcube.dataanalysis.ecoengine.models.cores.pca.PrincipalComponentAnalysis; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.Operations; import org.gcube.dataanalysis.ecoengine.utils.Transformations; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; import org.hibernate.SessionFactory; public class HabitatRepresentativeness extends DataAnalysis { @@ -31,24 +37,39 @@ public class HabitatRepresentativeness extends DataAnalysis { private float innerstatus; private int maxTests = 2; - public HashMap getInputParameters() { + public List getInputParameters() { + List parameters = new ArrayList(); + List templates = new ArrayList(); + templates.add(TableTemplates.HCAF); + templates.add(TableTemplates.TRAININGSET); + templates.add(TableTemplates.TESTSET); - HashMap parameters = new HashMap(); - - parameters.put("ProjectingAreaTable", new VarCouple(VARTYPE.STRING, "")); - parameters.put("ProjectingAreaFeaturesOptionalCondition", new VarCouple(VARTYPE.STRING, "oceanarea>0")); - - parameters.put("PositiveCasesTable", new VarCouple(VARTYPE.STRING, "")); - parameters.put("NegativeCasesTable", new VarCouple(VARTYPE.STRING, "")); - - parameters.put("FeaturesColumns", new VarCouple(VARTYPE.STRING, "")); - parameters.put("PositiveFeaturesColumns", new VarCouple(VARTYPE.STRING, "")); - - parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, "")); - parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, "")); - parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, "")); - parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, "")); + List templatesOccurrences = new ArrayList(); + 
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/HabitatRepresentativeness.java b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/HabitatRepresentativeness.java
index 2a96d28..2a9fd22 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/HabitatRepresentativeness.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/HabitatRepresentativeness.java
@@ -3,17 +3,23 @@ package org.gcube.dataanalysis.ecoengine.evaluation;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
+import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
+import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
 import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
 import org.gcube.dataanalysis.ecoengine.models.cores.pca.PrincipalComponentAnalysis;
 import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
 import org.gcube.dataanalysis.ecoengine.utils.Operations;
 import org.gcube.dataanalysis.ecoengine.utils.Transformations;
-import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
-import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
 import org.hibernate.SessionFactory;
 
 public class HabitatRepresentativeness extends DataAnalysis {
@@ -31,24 +37,39 @@ public class HabitatRepresentativeness extends DataAnalysis {
 	private float innerstatus;
 	private int maxTests = 2;
 
-	public HashMap<String, VarCouple> getInputParameters() {
-
-		HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
-
-		parameters.put("ProjectingAreaTable", new VarCouple(VARTYPE.STRING, ""));
-		parameters.put("ProjectingAreaFeaturesOptionalCondition", new VarCouple(VARTYPE.STRING, "oceanarea>0"));
-
-		parameters.put("PositiveCasesTable", new VarCouple(VARTYPE.STRING, ""));
-		parameters.put("NegativeCasesTable", new VarCouple(VARTYPE.STRING, ""));
-
-		parameters.put("FeaturesColumns", new VarCouple(VARTYPE.STRING, ""));
-		parameters.put("PositiveFeaturesColumns", new VarCouple(VARTYPE.STRING, ""));
-
-		parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
-		parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, ""));
-		parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
-		parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, ""));
+	public List<StatisticalType> getInputParameters() {
+		List<StatisticalType> parameters = new ArrayList<StatisticalType>();
+		List<TableTemplates> templates = new ArrayList<TableTemplates>();
+		templates.add(TableTemplates.HCAF);
+		templates.add(TableTemplates.TRAININGSET);
+		templates.add(TableTemplates.TESTSET);
+
+		List<TableTemplates> templatesOccurrences = new ArrayList<TableTemplates>();
+		templatesOccurrences.add(TableTemplates.OCCURRENCE);
+		templatesOccurrences.add(TableTemplates.TRAININGSET);
+		templatesOccurrences.add(TableTemplates.TESTSET);
+
+		InputTable p1 = new InputTable(templates, "ProjectingAreaTable", "A Table containing projecting area information");
+		PrimitiveType p2 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "ProjectingAreaFeaturesOptionalCondition", "optional filter for taking area rows", "oceanarea>0", true);
+		InputTable p3 = new InputTable(templatesOccurrences, "PositiveCasesTable", "A Table containing positive cases");
+		InputTable p4 = new InputTable(templatesOccurrences, "NegativeCasesTable", "A Table containing negative cases");
+		PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FeaturesColumns", "features column names separated by comma", "depthmean,depthmax,depthmin, sstanmean,sbtanmean,salinitymean,salinitybmean, primprodmean,iceconann,landdist,oceanarea");
+		DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
+		DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
+		DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
+		DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
+		DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
+
+		parameters.add(p1);
+		parameters.add(p2);
+		parameters.add(p3);
+		parameters.add(p4);
+		parameters.add(p5);
+		parameters.add(p6);
+		parameters.add(p7);
+		parameters.add(p8);
+		parameters.add(p9);
+		parameters.add(p10);
 
 		return parameters;
 	}
@@ -315,13 +336,9 @@ public class HabitatRepresentativeness extends DataAnalysis {
 	}
 
 	@Override
-	public VARTYPE getContentType() {
-		return VARTYPE.MAP;
-	}
-
-	@Override
-	public Object getContent() {
-		return output;
+	public StatisticalType getOutput() {
+		PrimitiveType p = new PrimitiveType(Map.class.getName(), output, PrimitiveTypes.MAP, "AnalysisResult", "Habitat Representativeness Score");
+		return p;
 	}
 
 	@Override
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateAnalysis.java b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateAnalysis.java
index 26d0106..436047e 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateAnalysis.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateAnalysis.java
@@ -4,11 +4,14 @@ import java.awt.Image;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.UUID;
 
 import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
 import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
+import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
 import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
 import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.Hspen;
 import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
@@ -32,10 +35,10 @@ public class BioClimateAnalysis {
 	private static String countNumberOfSpeciesPerRange = "select count(*) from %1$s where %4$s>%2$s and %4$s<=%3$s ";
 	private static String countNumberOfSpecies = "select count(*) from %1$s where depthmin<%2$s and depthmin>=%3$s and depthmax<%4$s and depthmax>=%5$s";
 	private static String countProbabilityPerArea = "select count(*) from %1$s as a join hcaf_s as b on b.%2$s = %3$s and a.probability > #THRESHOLD# and a.csquarecode=b.csquarecode";
-
+
 	public static String salinityDefaultRange = "salinitymin>27.44 and salinitymin<=36.57";
 	public static String salinityMinFeature = "salinitymin";
-
+
 	private static enum FIELD {
 		iceconann, sstanmean, salinitymean
 	};
@@ -90,7 +93,7 @@ public class BioClimateAnalysis {
 		bioClimate.produceGraphs2D();
 	}
 
-	private static String[] SERIES = { "High Probability Cells Trend (>%1$s)", "Number of Changing Cells", "Reducing Ice Concentration Trend", "Average Discrepancy Between Distributions", "Average Trends", "Ice Concentration", "Sea Surface Temperature", "Salinity" };
+	private static String[] SERIES = { "High Probability Cells Trend (>%1$s)", "Number of Changing Cells", "Reducing Ice Concentration Trend", "High Probability Cells Trend Derivative", "Average Trends", "Ice Concentration", "Sea Surface Temperature", "Salinity" };
 
 	public void produceGraphs2D() throws Exception {
 		DefaultCategoryDataset testpoints = new DefaultCategoryDataset();
@@ -107,18 +110,18 @@ public class BioClimateAnalysis {
 		lineg9.render(testpoints);
 	}
 
-	private HashMap producedImages;
+	private HashMap<String, Image> producedImages;
 
-	public HashMap getProducedImages() {
+	public HashMap<String, Image> getProducedImages() {
 		return producedImages;
 	}
 
 	private void produceGraphs(String[] csquareTableNames, String[] hspecTableNames, float threshold) throws Exception {
-
+
 		csquareTableNames = checkTableNames(csquareTableNames);
 		hspecTableNames = checkTableNames(hspecTableNames);
-
-		producedImages = new HashMap ();
+
+		producedImages = new HashMap<String, Image>();
 		int numberOfTrends = highProbabilityCells.length;
 		// create the datasets...
@@ -149,14 +152,14 @@ public class BioClimateAnalysis {
 			discrepancies[0] = min;
 
 			if (liveRender) {
-				BioClimateGraph lineg1 = new BioClimateGraph(String.format(SERIES[0],threshold), Operations.getMax(highProbabilityCells), Operations.getMin(highProbabilityCells));
+				BioClimateGraph lineg1 = new BioClimateGraph(String.format(SERIES[0], threshold), Operations.getMax(highProbabilityCells), Operations.getMin(highProbabilityCells));
 				BioClimateGraph lineg4 = new BioClimateGraph(SERIES[3], Operations.getMax(discrepancies), min);
 
 				lineg4.render(discrepanciesTrend);
 				lineg1.render(probabilityTrend);
 			}
 
-			producedImages.put("Probability_Trend",BioClimateGraph.renderStaticImgObject(width, height, probabilityTrend, String.format(SERIES[0],threshold), Operations.getMax(highProbabilityCells), Operations.getMin(highProbabilityCells)));
-			producedImages.put("Probability_Discrepancies_Trend",BioClimateGraph.renderStaticImgObject(width, height, discrepanciesTrend, SERIES[3], Operations.getMax(discrepancies), min));
+			producedImages.put("Probability_Trend", BioClimateGraph.renderStaticImgObject(width, height, probabilityTrend, String.format(SERIES[0], threshold), Operations.getMax(highProbabilityCells), Operations.getMin(highProbabilityCells)));
+			producedImages.put("Probability_Discrepancies_Trend", BioClimateGraph.renderStaticImgObject(width, height, discrepanciesTrend, SERIES[3], Operations.getMax(discrepancies), min));
 		}
 
 		if (doHcafAn) {
@@ -170,9 +173,9 @@ public class BioClimateAnalysis {
 				lineg8.render(avgSalinityD);
 			}
 
-			producedImages.put("Average_Ice_Concentration",BioClimateGraph.renderStaticImgObject(width, height, avgIceD, SERIES[5], Operations.getMax(avgIce), Operations.getMin(avgIce)));
-			producedImages.put("Average_SST",BioClimateGraph.renderStaticImgObject(width, height, avgSSTD, SERIES[6], Operations.getMax(avgSST), Operations.getMin(avgSST)));
-			producedImages.put("Average_Salinity",BioClimateGraph.renderStaticImgObject(width, height, avgSalinityD, SERIES[7], Operations.getMax(avgSalinity), Operations.getMin(avgSalinity)));
+			producedImages.put("Average_Ice_Concentration", BioClimateGraph.renderStaticImgObject(width, height, avgIceD, SERIES[5], Operations.getMax(avgIce), Operations.getMin(avgIce)));
+			producedImages.put("Average_SST", BioClimateGraph.renderStaticImgObject(width, height, avgSSTD, SERIES[6], Operations.getMax(avgSST), Operations.getMin(avgSST)));
+			producedImages.put("Average_Salinity", BioClimateGraph.renderStaticImgObject(width, height, avgSalinityD, SERIES[7], Operations.getMax(avgSalinity), Operations.getMin(avgSalinity)));
 		}
@@ -181,37 +184,37 @@ public class BioClimateAnalysis {
 	}
 
 	public void hcafEvolutionAnalysis(String[] hcafTable, String[] hcafTableNames) throws Exception {
-		globalEvolutionAnalysis(hcafTable, null, hcafTableNames, null, null, null,0f);
+		globalEvolutionAnalysis(hcafTable, null, hcafTableNames, null, null, null, 0f);
 	}
 
-	public void hspecEvolutionAnalysis(String[] hspecTables, String[] hspecTableNames, String probabilityColumn, String csquareColumn,float threshold) throws Exception {
+	public void hspecEvolutionAnalysis(String[] hspecTables, String[] hspecTableNames, String probabilityColumn, String csquareColumn, float threshold) throws Exception {
 		globalEvolutionAnalysis(null, hspecTables, null, hspecTableNames, probabilityColumn, csquareColumn, threshold);
 	}
 
-	private String[] checkTableNames(String [] tablesNames){
+	private String[] checkTableNames(String[] tablesNames) {
 		ArrayList<String> newtables = new ArrayList<String>();
-		if ((tablesNames==null) || (tablesNames.length==0))
+		if ((tablesNames == null) || (tablesNames.length == 0))
 			return tablesNames;
-		for (String table:tablesNames){
-			int i=1;
+		for (String table : tablesNames) {
+			int i = 1;
 			String originalTable = table;
-			while (newtables.contains(table)){
-				table = originalTable+"_"+i;
+			while (newtables.contains(table)) {
+				table = originalTable + "_" + i;
 				i++;
 			}
 			newtables.add(table);
 		}
-		String [] tables = new String[tablesNames.length];
-		for (int j=0;j<tablesNames.length;j++)
+		String[] tables = new String[tablesNames.length];
+		for (int j = 0; j < tablesNames.length; j++)
 			tables[j] = newtables.get(j);
 		return tables;
 	}
 
 	private void produceCharts(HashMap<String, HashMap<String, double[]>> GeoMap, String[] tablesNames) {
 		// produce a chart for each feature
 		tablesNames = checkTableNames(tablesNames);
-		producedImages = new HashMap ();
+		producedImages = new HashMap<String, Image>();
 		for (String featurename : GeoMap.keySet()) {
 			DefaultCategoryDataset chart = new DefaultCategoryDataset();
 			HashMap<String, double[]> timeseries = GeoMap.get(featurename);
@@ -231,13 +234,13 @@ public class BioClimateAnalysis {
 				BioClimateGraph lineg1 = new BioClimateGraph(featurename, absmax, absmin);
 				lineg1.render(chart);
 			}
-			producedImages.put(featurename.replace(" ", "_"),BioClimateGraph.renderStaticImgObject(width, height, chart, featurename, absmax, absmin));
+			producedImages.put(featurename.replace(" ", "_"), BioClimateGraph.renderStaticImgObject(width, height, chart, featurename, absmax, absmin));
 		}
 	}
 
 	/**
-	 * Generates a chart for hspens in time according to a certain interval in the parameter
-	 * e.g. : a chart for several salinity intervals
+	 * Generates a chart for hspens in time according to a certain interval in the parameter e.g. : a chart for several salinity intervals
+	 *
 	 * @param hspenTables
 	 * @param hspenTableNames
 	 * @param parameterName
@@ -291,9 +294,9 @@ public class BioClimateAnalysis {
 			// for each cluster build up a chart
 			for (int j = 1; j < pClusters.length; j++) {
-				double prevpmax = MathFunctions.roundDecimal(Double.parseDouble("" + (Object) paramrange.get(pClusters[j - 1])),2);
-				pmax = MathFunctions.roundDecimal(Double.parseDouble("" + (Object) paramrange.get(pClusters[j])),2);
-
+				double prevpmax = MathFunctions.roundDecimal(Double.parseDouble("" + (Object) paramrange.get(pClusters[j - 1])), 2);
+				pmax = MathFunctions.roundDecimal(Double.parseDouble("" + (Object) paramrange.get(pClusters[j])), 2);
+
 				if (prevpmax != pmax) {
 					// take the number of elements for this range
 					String countSpeciesQuery = String.format(countNumberOfSpeciesPerRange, hspenTables[i], prevpmax, pmax, parameterName);
@@ -340,11 +343,13 @@ public class BioClimateAnalysis {
 	/**
 	 * Generates a chart for hspec probability > thr in each Fao Area and LME
+	 *
 	 * @param hspecTables
 	 * @param hspecTablesNames
 	 * @throws Exception
 	 */
-	public void speciesGeographicEvolutionAnalysis(String[] hspecTables, String[] hspecTablesNames, float threshold) throws Exception {
+
+	public void speciesGeographicEvolutionAnalysis2(String[] hspecTables, String[] hspecTablesNames, float threshold) throws Exception {
 		try {
 			referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
 			AnalysisLogger.getLogger().debug("ReferenceDB initialized");
@@ -356,24 +361,23 @@ public class BioClimateAnalysis {
 
 			// a map for each feature. each sub map contains a trend for faoaream, lme etc.
 			HashMap<String, HashMap<String, double[]>> GeoMap = new HashMap<String, HashMap<String, double[]>>();
-
+
 			float statusstep = 80f / (float) numbOfTables;
 			// for each table
 			for (int i = 0; i < numbOfTables; i++) {
 				// for each criterion to apply: fao area, lme etc.
 				for (int j = 0; j < criteriaNames.length; j++) {
-					List listCriterion = DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements("hcaf_s", selectionCriteria[j],criteriaFilters[j]),referencedbConnection);
-					for (Object code: listCriterion){
-						String code$ = ""+code;
-						String query = String.format(countProbabilityPerArea,hspecTables[i],selectionCriteria[j],code$);
-						query = query.replace("#THRESHOLD#", ""+threshold);
-						AnalysisLogger.getLogger().trace("Executing query for counting probabilities: "+query);
+					List listCriterion = DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements("hcaf_s", selectionCriteria[j], criteriaFilters[j]), referencedbConnection);
+					for (Object code : listCriterion) {
+						String code$ = "" + code;
+						String query = String.format(countProbabilityPerArea, hspecTables[i], selectionCriteria[j], code$);
+						query = query.replace("#THRESHOLD#", "" + threshold);
+						AnalysisLogger.getLogger().trace("Executing query for counting probabilities: " + query);
 						List counts = DatabaseFactory.executeSQLQuery(query, referencedbConnection);
 						AnalysisLogger.getLogger().trace("Query Executed");
-						int countPerArea = (counts==null)?0:Integer.parseInt(""+counts.get(0));
-
-						String chartName = "Hspec (prob>0.8) for " + criteriaNames[j] + "_" + code$;
-						// put the code and the value in the timeseries associated to the feature name
+						int countPerArea = (counts == null) ? 0 : Integer.parseInt("" + counts.get(0));
+
+						String chartName = "Hspec (prob>0.8) for " + criteriaNames[j] + "_" + code$; // put the code and the value in the timeseries associated to the feature name
 						HashMap<String, double[]> submap = GeoMap.get(chartName);
 						if (submap == null) {
 							submap = new HashMap<String, double[]>();
@@ -386,9 +390,9 @@ public class BioClimateAnalysis {
 							elements = new double[numbOfTables];
 							submap.put(timeseries, elements);
 						}
-
+
 						elements[i] = countPerArea;
-
+
 					}
 				}
@@ -406,13 +410,97 @@ public class BioClimateAnalysis {
 			referencedbConnection.close();
 		}
 	}
-
-/**
- * Generates a geographic trend for each hspec feature: ice con, salinity, sst in each fao area
- * @param hcafTable
- * @param hcafTableNames
- * @throws Exception
- */
+
+	public void speciesGeographicEvolutionAnalysis(String[] hspecTables, String[] hspecTablesNames, float threshold) throws Exception {
+		try {
+			referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
+			AnalysisLogger.getLogger().debug("ReferenceDB initialized");
+			status = 0f;
+
+			int numbOfTables = (hspecTables != null) ? hspecTables.length : 0;
+
+			if (numbOfTables > 0) {
+
+				// a map for each feature. each sub map contains a trend for faoaream, lme etc.
+				HashMap<String, HashMap<String, double[]>> GeoMap = new HashMap<String, HashMap<String, double[]>>();
+
+				float statusstep = 80f / (float) numbOfTables;
+				// for each table
+				for (int i = 0; i < numbOfTables; i++) {
+					String tmpanalysisTable = "tmpanalysis" + ("" + UUID.randomUUID()).replace("-", "").replace("_", "");
+					try {
+						DatabaseFactory.executeSQLUpdate("drop table " + tmpanalysisTable, referencedbConnection);
+					} catch (Exception ee) {
+						AnalysisLogger.getLogger().trace("table " + tmpanalysisTable + " does not exist");
+					}
+					String preparationQuery = "create table " + tmpanalysisTable + " as select a.faoaream, lme,count(*) from %1$s as a where a.probability > #THRESHOLD# group by faoaream,lme;";
+					preparationQuery = String.format(preparationQuery, hspecTables[i]);
+					preparationQuery = preparationQuery.replace("#THRESHOLD#", "" + threshold);
+					AnalysisLogger.getLogger().trace("Executing query for counting probabilities: " + preparationQuery);
+
+					DatabaseFactory.executeSQLUpdate(preparationQuery, referencedbConnection);
+
+					AnalysisLogger.getLogger().trace("Query Executed");
+
+					// for each criterion to apply: fao area, lme etc.
+					for (int j = 0; j < criteriaNames.length; j++) {
+						String criteriaQuery = String.format("select %1$s,sum(count) from " + tmpanalysisTable + " %2$s group by %1$s;", selectionCriteria[j], criteriaFilters[j]);
+						AnalysisLogger.getLogger().trace("Executing query for counting probabilities: " + criteriaQuery);
+						List codeSums = DatabaseFactory.executeSQLQuery(criteriaQuery, referencedbConnection);
+						for (Object codeSum : codeSums) {
+							String code$ = "" + ((Object[]) codeSum)[0];
+							int countPerArea = (((Object[]) codeSum)[1] == null) ? 0 : Integer.parseInt("" + ((Object[]) codeSum)[1]);
+							AnalysisLogger.getLogger().trace("Analyzing " + selectionCriteria[j] + " with code " + code$ + " count " + countPerArea);
+
+							String chartName = "Hspec (prob>0.8) for " + criteriaNames[j] + "_" + code$;
+							// put the code and the value in the timeseries associated to the feature name
+							HashMap<String, double[]> submap = GeoMap.get(chartName);
+							if (submap == null) {
+								submap = new HashMap<String, double[]>();
+								GeoMap.put(chartName, submap);
+							}
+							String timeseries = "number of occupied cells";
+							double[] elements = submap.get(timeseries);
+							if (elements == null) {
+								elements = new double[numbOfTables];
+								submap.put(timeseries, elements);
+							}
+
+							elements[i] = countPerArea;
+
+						}
+
+					}
+
+					try {
+						DatabaseFactory.executeSQLUpdate("drop table " + tmpanalysisTable, referencedbConnection);
+					} catch (Exception ee) {
+						ee.printStackTrace();
+						AnalysisLogger.getLogger().trace("table " + tmpanalysisTable + " does not exist");
+					}
+
+					status = status + statusstep;
+				}
+
+				status = 80f;
+				produceCharts(GeoMap, hspecTablesNames);
+			}
+		} catch (Exception e) {
+			e.printStackTrace();
+			throw e;
+		} finally {
+			status = 100f;
+			referencedbConnection.close();
+		}
+	}
+
+	/**
+	 * Generates a geographic trend for each hspec feature: ice con, salinity, sst in each fao area
+	 *
+	 * @param hcafTable
+	 * @param hcafTableNames
+	 * @throws Exception
+	 */
 	public void geographicEvolutionAnalysis(String[] hcafTable, String[] hcafTableNames) throws Exception {
 		try {
 			referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
@@ -482,8 +570,8 @@ public class BioClimateAnalysis {
 	}
 
 	/**
-	 * Generates a chart for each hspec feature
-	 * Generates a chart for hspec prob > thr and performs a discrepancy analysis on hspec
+	 *
+	 * @param hcafTable
 	 * @param hspecTables
 	 * @param hcafTablesNames
 	 * @param hspecTableNames
 	 * @param probabilityColumn
 	 * @param csquareColumn
 	 * @throws Exception
 	 */
-	public void globalEvolutionAnalysis(String[] hcafTable, String[] hspecTables, String[] hcafTablesNames, String[] hspecTableNames, String probabilityColumn, String csquareColumn,float threshold) throws Exception {
+	public void globalEvolutionAnalysis(String[] hcafTable, String[] hspecTables, String[] hcafTablesNames, String[] hspecTableNames, String probabilityColumn, String csquareColumn, float threshold) throws Exception {
 		try {
 			referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config);
 			AnalysisLogger.getLogger().debug("ReferenceDB initialized");
@@ -517,7 +605,10 @@ public class BioClimateAnalysis {
 			avgSalinity = new double[numbOfPoints];
 
 			float statusstep = 80f / (float) numbOfPoints;
-
+//			create temp table puppa as select count(*), probability > 0.8 as aboveThreshold, sum(probability) as partialprobability from hspec_2050_suitable group by probability >0.8;
+//			select count(*), probability > 0.8 as aboveThreshold, sum(probability) as partialprobability from hspec_2050_suitable group by probability >0.8;
+//			select sum(count) as count, 3 as x from puppa union select count,2 as x from puppa where abovethreshold = true union select sum(partialprobability) as count, 1 as x from puppa order by x desc;
+
 			for (int i = 0; i < numbOfPoints; i++) {
 				if (doHspecAn)
 					highProbabilityCells[i] = calcHighProbabilityCells(hspecTables[i], threshold);
@@ -534,7 +625,8 @@ public class BioClimateAnalysis {
 					discrepancies[i] = 1.0;
 				} else {
 					// OLD CALCULATION discrepancies[i] = MathFunctions.roundDecimal(calcDiscrepancy(configPath, temporaryDirectory, hspecTables[i], hspecTables[i - 1], probabilityColumn, csquareColumn, 0.1f), 5);
-					discrepancies[i] = MathFunctions.roundDecimal(calcOverDiscrepancy(configPath, temporaryDirectory, hspecTables[i], hspecTables[i - 1], probabilityColumn, csquareColumn, 0.1f), 5);
+//					discrepancies[i] = MathFunctions.roundDecimal(calcOverDiscrepancy(configPath, temporaryDirectory, hspecTables[i], hspecTables[i - 1], probabilityColumn, csquareColumn, 0.1f), 5);
+					discrepancies[i] = highProbabilityCells[i]-highProbabilityCells[i-1];
 				}
 				AnalysisLogger.getLogger().trace("(" + hspecTables[i] + "): DISCREPANCY " + discrepancies[i] + " HIGH PROB CELLS " + highProbabilityCells[i]);
 			}
@@ -544,7 +636,7 @@ public class BioClimateAnalysis {
 				status = status + statusstep;
 			}
 			status = 80f;
-			produceGraphs(hcafTablesNames, hspecTableNames,threshold);
+			produceGraphs(hcafTablesNames, hspecTableNames, threshold);
 
 		} catch (Exception e) {
 			e.printStackTrace();
@@ -653,7 +745,9 @@ public class BioClimateAnalysis {
 		config.setParam("MaxSamples", "" + 30000);
 
 		eval = EvaluatorsFactory.getEvaluators(config).get(0);
-		HashMap out = eval.process(config);
+		PrimitiveType output = (PrimitiveType) eval.process(config);
+
+		HashMap<String, String> out = (HashMap<String, String>) output.getContent();
 		Double d = Double.parseDouble(out.get("MEAN"));
 		return d;
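The rewritten speciesGeographicEvolutionAnalysis replaces the per-code counting of the old version (kept as ...2) with one grouped temporary table per HSPEC table, which is then re-aggregated once per criterion. A sketch of the query pattern, with illustrative table names:

    public class GeoAnalysisQuerySketch {
        public static void main(String[] args) {
            String tmp = "tmpanalysis" + java.util.UUID.randomUUID().toString().replace("-", "");
            // one scan of the HSPEC table, instead of one count query per FAO area / LME code
            String prepare = "create table " + tmp
                + " as select a.faoaream, lme, count(*) from hspec_2050 as a"
                + " where a.probability > 0.8 group by faoaream, lme";
            // each criterion is then a cheap re-aggregation of the small temporary table
            String perFaoArea = "select faoaream, sum(count) from " + tmp + " group by faoaream";
            String perLme = "select lme, sum(count) from " + tmp + " group by lme";
            System.out.println(prepare + "\n" + perFaoArea + "\n" + perLme);
        }
    }

The commented-out "puppa" queries left in globalEvolutionAnalysis suggest the same grouped-temp-table idea was being explored for the probability trends as well.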
InterpolateTables { String filename = temporaryDirectory + initialFile + "_" + (yearCals) + "_" + function.name() + "_" + i + System.currentTimeMillis() + ".csv"; FileTools.saveString(filename, completeFile.toString(), true, "UTF-8"); producedfiles[i] = new File(filename); + System.out.println("PRODUCED FILE TO COPY "+producedfiles[i]); } } @@ -267,13 +268,18 @@ public class InterpolateTables { public static void main(String[] args) throws Exception { String configPath = "./cfg/"; - String persistencePath = "c:/tmp/"; + String persistencePath = "/win/"; + /* String databaseUrl = "jdbc:postgresql://localhost/testdb"; String databaseUser = "gcube"; String databasePassword = "d4science2"; + */ + String databaseUrl = "jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated"; + String databaseUser = "utente"; + String databasePassword = "d4science"; InterpolateTables interp = new InterpolateTables(configPath, persistencePath, databaseUrl, databaseUser, databasePassword); - interp.interpolate("hcaf_d", "hcaf_d_2050", 5, INTERPOLATIONFUNCTIONS.LINEAR, 2012, 2050); + interp.interpolate("hcaf_d", "hcaf_d_2050", 7, INTERPOLATIONFUNCTIONS.LINEAR, 2012, 2050); } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Clusterer.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Clusterer.java new file mode 100644 index 0000000..0ea09ed --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Clusterer.java @@ -0,0 +1,31 @@ +package org.gcube.dataanalysis.ecoengine.interfaces; + +import java.util.List; + +import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; +import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; + +public interface Clusterer { + + public ALG_PROPS[] getSupportedAlgorithms(); + + public INFRASTRUCTURE getInfrastructure(); + + public void init() throws Exception; + + public void setConfiguration(AlgorithmConfiguration config); + + public void shutdown(); + + public float getStatus(); + + public String getDescription(); + + public List getInputParameters(); + + public StatisticalType getOutput(); + + public void cluster() throws Exception; +} diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/ComputationalAgent.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/ComputationalAgent.java index 973ad9a..3d13aa0 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/ComputationalAgent.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/ComputationalAgent.java @@ -1,15 +1,14 @@ package org.gcube.dataanalysis.ecoengine.interfaces; -import java.util.HashMap; +import java.util.List; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; public interface ComputationalAgent { //set the input parameters for this generator - public HashMap getInputParameters(); + public List getInputParameters(); public String getResourceLoad(); @@ -20,10 +19,8 @@ public interface ComputationalAgent { //gets the weight of the generator: according to this the generator will be placed in the execution order public INFRASTRUCTURE getInfrastructure(); - // gets the type of the content inside the generator: String, File, HashMap. 
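For illustration, the caller-side effect of this interface change, as a minimal hedged sketch: the typed pair getContentType()/getContent() collapses into a single self-describing getOutput(). Only PrimitiveType.getContent() is confirmed by this patch (see the BioClimateAnalysis hunk above); the instanceof discrimination and the agent variable are assumptions, not part of the commit.

    // agent: any ComputationalAgent obtained from one of the factories (hypothetical)
    StatisticalType result = agent.getOutput();  // replaces getContentType() + getContent()
    if (result instanceof PrimitiveType) {
        Object content = ((PrimitiveType) result).getContent();
        if (content instanceof File)             // e.g. a trained neural network file
            System.out.println("produced: " + ((File) content).getName());
    }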
- public VARTYPE getContentType(); - // gets the content of the model: e.g. Table indications etc. - public Object getContent(); + public StatisticalType getOutput(); + } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/DataAnalysis.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/DataAnalysis.java index 4d729d9..8351167 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/DataAnalysis.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/DataAnalysis.java @@ -2,11 +2,14 @@ package org.gcube.dataanalysis.ecoengine.interfaces; import java.util.HashMap; import java.util.List; +import java.util.Map; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; +import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; /** * Implements a mono-thread data analysis process @@ -21,15 +24,11 @@ public abstract class DataAnalysis implements Evaluator{ protected float status; /** - * establishes imput parameters for this algorithm along with their type + * establishes input parameters for this algorithm along with their type */ - public abstract HashMap getInputParameters(); + public abstract List getInputParameters(); - /** - * lists the output parameters names - * @return - */ - public abstract List getOutputParameters(); + /** * Executed the core of the algorithm @@ -58,7 +57,7 @@ public abstract class DataAnalysis implements Evaluator{ * @return * @throws Exception */ - public HashMap process(AlgorithmConfiguration config) throws Exception{ + public StatisticalType process(AlgorithmConfiguration config) throws Exception{ status = 0; HashMap out = new HashMap(); try{ @@ -72,7 +71,7 @@ public abstract class DataAnalysis implements Evaluator{ finally{ status = 100; } - return out; + return new PrimitiveType(Map.class.getName(), out, PrimitiveTypes.MAP, "Analysis","Analysis Results"); } /** diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Evaluator.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Evaluator.java index c490165..5c915ad 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Evaluator.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Evaluator.java @@ -3,12 +3,13 @@ package org.gcube.dataanalysis.ecoengine.interfaces; import java.util.HashMap; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; public interface Evaluator extends ComputationalAgent{ - public HashMap process(AlgorithmConfiguration config) throws Exception; + public StatisticalType process(AlgorithmConfiguration config) throws Exception; public abstract void init(AlgorithmConfiguration config) throws Exception; diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Generator.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Generator.java index 9eea11e..7a676d2 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Generator.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Generator.java @@ -20,4 +20,5 @@ public interface Generator extends ComputationalAgent{ public void 
generate() throws Exception; + public SpatialProbabilityDistribution getAlgorithm(); } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Model.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Model.java index 9fc6f04..c527ee7 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Model.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Model.java @@ -1,11 +1,10 @@ package org.gcube.dataanalysis.ecoengine.interfaces; -import java.util.HashMap; +import java.util.List; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; public interface Model { @@ -20,7 +19,7 @@ public interface Model { public String getDescription(); //set the input parameters for this generator - public HashMap getInputParameters(); + public List getInputParameters(); public float getVersion(); @@ -34,17 +33,11 @@ public interface Model { public float getStatus(); - public String getInputType(); - - public String getOutputType(); - public void postprocess(AlgorithmConfiguration Input, Model previousModel); public void train(AlgorithmConfiguration Input, Model previousModel); public void stop(); - public VARTYPE getContentType(); - - public Object getContent(); + public StatisticalType getOutput(); } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistribution.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistribution.java index 05d3eb8..623e024 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistribution.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistribution.java @@ -1,10 +1,9 @@ package org.gcube.dataanalysis.ecoengine.interfaces; -import java.util.HashMap; +import java.util.List; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; public interface SpatialProbabilityDistribution { @@ -18,10 +17,8 @@ public interface SpatialProbabilityDistribution { public String getDescription(); //set the input parameters for this generator - public HashMap getInputParameters(); + public List getInputParameters(); - public VARTYPE getContentType(); - - public Object getContent(); + public StatisticalType getOutput(); } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionNode.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionNode.java index 5a52b0d..59bfec1 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionNode.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionNode.java @@ -1,34 +1,80 @@ package org.gcube.dataanalysis.ecoengine.interfaces; +import java.io.File; +import java.io.FileWriter; + import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -public interface SpatialProbabilityDistributionNode extends SpatialProbabilityDistribution { +public abstract class SpatialProbabilityDistributionNode implements SpatialProbabilityDistribution { 
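The abstract class introduced below also doubles as a generic worker entry point: its main(), added further down, unpacks an underscore-delimited argument string. A hedged sketch of the launcher-side packing convention follows (the class name and values are illustrative, not part of this patch); note that main() splits args[0] on "_", so no component, including the sandbox path and the configuration file name, may itself contain an underscore.

    // packs: order_chunk_speciesOrder_speciesChunk_path_class_duplicate_config; log file is args[1]
    String packed = 0 + "_" + 1000                      // cell start index, number of cells to process
            + "_" + 0 + "_" + 10                        // species start index, number of species to process
            + "_" + "/tmp/sandbox"                      // sandbox folder (must not contain '_')
            + "_" + "org.example.MyDistributionNode"    // class instantiated via Class.forName (hypothetical)
            + "_" + false                               // duplicate-message flag
            + "_" + "node.cfg";                         // node configuration file name
    SpatialProbabilityDistributionNode.main(new String[] { packed, "node.log" });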
 //initialization of a single node
- public void initSingleNode(AlgorithmConfiguration config);
+ public abstract void initSingleNode(AlgorithmConfiguration config);
 //get the internal processing status for the single step calculation
- public float getInternalStatus();
+ public abstract float getInternalStatus();
 //execute a single node
- public int executeNode(int cellStarIndex, int numberOfCellsToProcess, int speciesStartIndex, int numberOfSpeciesToProcess, String sandboxFolder, String logfileNameToProduce);
+ public abstract int executeNode(int cellStarIndex, int numberOfCellsToProcess, int speciesStartIndex, int numberOfSpeciesToProcess, boolean duplicate, String sandboxFolder, String nodeConfigurationFileObject, String logfileNameToProduce);
 // An initialization phase in which the inputs are initialized
- public void setup(AlgorithmConfiguration config) throws Exception;
+ public abstract void setup(AlgorithmConfiguration config) throws Exception;
 //get overall number of species to process
- public int getNumberOfSpecies();
+ public abstract int getNumberOfSpecies();
 //get overall number of geographical information to process
- public int getNumberOfGeoInfo();
+ public abstract int getNumberOfGeoInfo();
 //get overall number of processed species
- public int getNumberOfProcessedSpecies();
+ public abstract int getNumberOfProcessedSpecies();
 //stop the execution of the node
- public void stop();
+ public abstract void stop();
 //postprocess after the whole calculation: reduce operation
- public void postProcess();
+ public abstract void postProcess(boolean manageDuplicates, boolean manageFault);
+ public static void main(String[] args) throws Exception{
+ try{
+ System.out.println("Generic Node: Process Started ");
+ try {
+ for (int i = 0; i < args.length; i++) {
+ System.out.println("Generic Node: RECEIVED INPUT " + args[i]);
+ }
+ } catch (Exception e) {
+ }
+
+ System.out.println("Generic Node: checking arguments from "+args[0]);
+ String[] rargs = args[0].split("_");
+
+ int order = Integer.parseInt(rargs[0]);
+ System.out.println("Generic Node: order: " + order);
+ int chunksize = Integer.parseInt(rargs[1]);
+ System.out.println("Generic Node: chunk: " + chunksize);
+ int speciesOrder = Integer.parseInt(rargs[2]);
+ System.out.println("Generic Node: species: " + speciesOrder);
+ int speciesChunksize = Integer.parseInt(rargs[3]);
+ System.out.println("Generic Node: species chunk size: " + speciesChunksize);
+ String path = rargs[4];
+ System.out.println("Generic Node: path: " + path);
+ String algorithmClass = rargs[5];
+ System.out.println("Generic Node: algorithmClass: " + algorithmClass);
+ Boolean duplicate = Boolean.parseBoolean(rargs[6]);
+ System.out.println("Generic Node: duplicate message: " + duplicate);
+ String nodeConfiguration = rargs[7];
+ System.out.println("Generic Node: config: " + nodeConfiguration);
+ String logfile = args[1];
+ System.out.println("Generic Node: logfile: " + logfile);
+
+ System.out.println("Generic Node: executing class");
+
+ SpatialProbabilityDistributionNode node = (SpatialProbabilityDistributionNode) Class.forName(algorithmClass).newInstance();
+
+ node.executeNode(order, chunksize, speciesOrder, speciesChunksize, duplicate, path, nodeConfiguration, logfile);
+ }catch(Exception e){
+ System.out.println("ERROR "+e.getMessage());
+ System.out.println(e);
+ }
+ }
+ }
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/modeling/SimpleModeler.java b/src/main/java/org/gcube/dataanalysis/ecoengine/modeling/SimpleModeler.java index
709c887..e845765 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/modeling/SimpleModeler.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/modeling/SimpleModeler.java @@ -1,14 +1,13 @@ package org.gcube.dataanalysis.ecoengine.modeling; -import java.util.HashMap; +import java.util.List; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.interfaces.Model; import org.gcube.dataanalysis.ecoengine.interfaces.Modeler; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; public class SimpleModeler implements Modeler{ private Model innermodel; @@ -64,7 +63,7 @@ public class SimpleModeler implements Modeler{ } @Override - public HashMap getInputParameters() { + public List getInputParameters() { return innermodel.getInputParameters(); } @@ -73,12 +72,8 @@ public class SimpleModeler implements Modeler{ return INFRASTRUCTURE.LOCAL; } - public VARTYPE getContentType() { - return innermodel.getContentType(); - } - - public Object getContent() { - return innermodel.getContent(); + public StatisticalType getOutput() { + return innermodel.getOutput(); } } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNN.java b/src/main/java/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNN.java index 26d2893..aeda7cb 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNN.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNN.java @@ -4,17 +4,26 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; +import java.util.Map; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; +import org.gcube.dataanalysis.ecoengine.datatypes.InputTable; +import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; +import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.interfaces.Model; import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; import org.hibernate.SessionFactory; public class ModelAquamapsNN implements Model { @@ -36,18 +45,34 @@ public class ModelAquamapsNN implements Model { } @Override - public HashMap getInputParameters() { - HashMap parameters = new HashMap(); - parameters.put("AbsenceDataTable", new VarCouple(VARTYPE.STRING, "absence_data")); - parameters.put("PresenceDataTable", new 
VarCouple(VARTYPE.STRING, "presence_data")); - parameters.put("SpeciesName", new VarCouple(VARTYPE.STRING, "")); - parameters.put("LayersNeurons", new VarCouple(VARTYPE.STRING, "100,2")); + public List getInputParameters() { + List parameters = new ArrayList(); + List templatesOccurrences = new ArrayList(); + templatesOccurrences.add(TableTemplates.OCCURRENCE); + + InputTable p1 = new InputTable(templatesOccurrences,"AbsenceDataTable","A Table containing absence points"); + InputTable p2 = new InputTable(templatesOccurrences,"PresenceDataTable","A Table containing positive occurrences"); + PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SpeciesName","Species Code of the fish the NN will correspond to","Fis-10407"); + PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "LayersNeurons","a list of neurons number for each inner layer separated by comma","100,2"); - parameters.put("UserName", new VarCouple(VARTYPE.SERVICE, "")); - parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, "")); - parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, "")); - parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, "")); - parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, "")); + DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name"); + DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password"); + DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver"); + DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url"); + DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect"); + + ServiceType p10 = new ServiceType(ServiceParameters.USERNAME, "UserName","LDAP username"); + + parameters.add(p1); + parameters.add(p2); + parameters.add(p3); + parameters.add(p4); + parameters.add(p5); + parameters.add(p6); + parameters.add(p7); + parameters.add(p8); + parameters.add(p9); + parameters.add(p10); return parameters; } @@ -119,15 +144,7 @@ public class ModelAquamapsNN implements Model { return status; } - @Override - public String getInputType() { - return AlgorithmConfiguration.class.getName(); - } - @Override - public String getOutputType() { - return File.class.getName(); - } @Override public void postprocess(AlgorithmConfiguration Input, Model previousModel) { @@ -182,15 +199,14 @@ public class ModelAquamapsNN implements Model { status = 100f; } - public VARTYPE getContentType() { - return VARTYPE.FILE; + + + @Override + public StatisticalType getOutput() { + PrimitiveType p = new PrimitiveType(File.class.getName(), new File(fileName), PrimitiveTypes.FILE, "NeuralNetwork","Trained Neural Network"); + return p; } - - public Object getContent() { - return new File(fileName); - - } - + @Override public void stop() { diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNNNS.java b/src/main/java/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNNNS.java index 0d250e2..d9fee1b 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNNNS.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNNNS.java @@ -4,19 +4,25 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; -import java.util.HashMap; +import 
java.util.ArrayList; import java.util.List; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; +import org.gcube.dataanalysis.ecoengine.datatypes.InputTable; +import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; +import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.interfaces.Model; -import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network; import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions.NeuralNet; import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions.Pattern; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; import org.hibernate.SessionFactory; public class ModelAquamapsNNNS implements Model { @@ -38,20 +44,36 @@ public class ModelAquamapsNNNS implements Model { } @Override - public HashMap getInputParameters() { - HashMap parameters = new HashMap(); - parameters.put("AbsenceDataTable", new VarCouple(VARTYPE.STRING, "absence_data")); - parameters.put("PresenceDataTable", new VarCouple(VARTYPE.STRING, "presence_data")); - parameters.put("SpeciesName", new VarCouple(VARTYPE.STRING, "")); - parameters.put("UserName", new VarCouple(VARTYPE.STRING, "")); + public List getInputParameters() { + List parameters = new ArrayList(); + List templatesOccurrences = new ArrayList(); + templatesOccurrences.add(TableTemplates.OCCURRENCE); + + InputTable p1 = new InputTable(templatesOccurrences,"AbsenceDataTable","A Table containing absence points"); + InputTable p2 = new InputTable(templatesOccurrences,"PresenceDataTable","A Table containing positive occurrences"); + PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SpeciesName","Species Code of the fish the NN will correspond to","Fis-10407"); + PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "LayersNeurons","a list of neurons number for each inner layer separated by comma","100,2"); - parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, "")); - parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, "")); - parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, "")); - parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, "")); + DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name"); + DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password"); + DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver"); + DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url"); + DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", 
"db dialect"); + + ServiceType p10 = new ServiceType(ServiceParameters.USERNAME, "UserName","LDAP username"); + + parameters.add(p1); + parameters.add(p2); + parameters.add(p3); + parameters.add(p4); + parameters.add(p5); + parameters.add(p6); + parameters.add(p7); + parameters.add(p8); + parameters.add(p9); + parameters.add(p10); return parameters; } - @Override public float getVersion() { return 0; @@ -109,15 +131,7 @@ public class ModelAquamapsNNNS implements Model { return status; } - @Override - public String getInputType() { - return AlgorithmConfiguration.class.getName(); - } - @Override - public String getOutputType() { - return String.class.getName(); - } @Override public void postprocess(AlgorithmConfiguration Input, Model previousModel) { @@ -187,14 +201,12 @@ public class ModelAquamapsNNNS implements Model { status = 100f; } - public VARTYPE getContentType() { - return VARTYPE.FILE; - } - - public Object getContent() { - return fileName; - + @Override + public StatisticalType getOutput() { + PrimitiveType p = new PrimitiveType(File.class.getName(), new File(fileName), PrimitiveTypes.FILE, "NeuralNetwork","Trained Neural Network"); + return p; } + @Override public void stop() { diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/models/ModelHSPEN.java b/src/main/java/org/gcube/dataanalysis/ecoengine/models/ModelHSPEN.java index 43fc226..0a71249 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/models/ModelHSPEN.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/models/ModelHSPEN.java @@ -13,12 +13,20 @@ import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad; import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources; +import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; +import org.gcube.dataanalysis.ecoengine.datatypes.InputTable; +import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable; +import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; +import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.interfaces.Model; import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.AquamapsEnvelopeAlgorithm; import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.EnvelopeSet; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; import org.hibernate.SessionFactory; public class ModelHSPEN implements Model { @@ -57,6 +65,7 @@ public class ModelHSPEN implements Model { private long lastTime; AlgorithmConfiguration outconfig; private String outputTable; + private String outputTableLabel; @Override public float getVersion() { @@ -90,6 +99,7 @@ public class ModelHSPEN implements Model { } outputTable = outconfig.getParam("OuputEnvelopeTable"); + outputTableLabel = outconfig.getParam("OuputEnvelopeTableLabel"); // initialize queries dynamicAlterQuery = alterQuery.replace("%HSPEN%", 
outconfig.getParam("OuputEnvelopeTable")); dynamicDropTable = dropHspenTable.replace("%HSPEN%", outconfig.getParam("OuputEnvelopeTable")); @@ -335,14 +345,16 @@ public class ModelHSPEN implements Model { // take ending time } - public VARTYPE getContentType() { - return VARTYPE.HSPEN; - } - public Object getContent() { - return outputTable; + @Override + public StatisticalType getOutput() { + List templateHspen = new ArrayList(); + templateHspen.add(TableTemplates.HSPEN); + OutputTable p = new OutputTable(templateHspen,outputTableLabel,outputTable,"Output hspen table"); + return p; } - + + @Override public void setVersion(float version) { this.version = version; @@ -404,16 +416,7 @@ public class ModelHSPEN implements Model { return status; } - @Override - public String getInputType() { - return AlgorithmConfiguration.class.getName(); - } - - @Override - public String getOutputType() { - return String.class.getName(); - } - + @Override public ALG_PROPS[] getProperties() { ALG_PROPS[] props = {ALG_PROPS.SPECIES_ENVELOPES}; @@ -426,17 +429,39 @@ public class ModelHSPEN implements Model { } @Override - public HashMap getInputParameters() { - HashMap parameters = new HashMap(); - parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING,"hspen")); - parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.STRING,"hcaf_d")); - parameters.put("OccurrenceCellsTable", new VarCouple(VARTYPE.STRING,"occurrencecells")); - parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"true")); - parameters.put("OuputEnvelopeTable", new VarCouple(VARTYPE.RANDOM,"hspen_")); - parameters.put("DatabaseUserName",new VarCouple(VARTYPE.DATABASEUSERNAME,"")); - parameters.put("DatabasePassword",new VarCouple(VARTYPE.DATABASEPASSWORD,"")); - parameters.put("DatabaseURL",new VarCouple(VARTYPE.DATABASEURL,"")); - parameters.put("DatabaseDriver",new VarCouple(VARTYPE.DATABASEDRIVER,"")); + public List getInputParameters() { + List parameters = new ArrayList(); + List templatesOccurrences = new ArrayList(); + templatesOccurrences.add(TableTemplates.OCCURRENCE); + List templateHspen = new ArrayList(); + templateHspen.add(TableTemplates.HSPEN); + List templateHcaf = new ArrayList(); + templateHcaf.add(TableTemplates.HCAF); + + InputTable p1 = new InputTable(templateHspen,"EnvelopeTable","The previous hspen table for regeneration","hspen"); + InputTable p2 = new InputTable(templateHcaf,"CsquarecodesTable","HCaf Table","hcaf_d"); + InputTable p3 = new InputTable(templatesOccurrences,"OccurrenceCellsTable","Ocurrence Cells Table","occurrencecells"); + PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.CONSTANT, "CreateTable","Create New Table for each computation","true"); + PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "OuputEnvelopeTableLabel","Table name for the new hspen","hspen_1"); + ServiceType p11 = new ServiceType(ServiceParameters.RANDOMSTRING, "OuputEnvelopeTable","Table name for the new hspen","hspen_"); + DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name"); + DatabaseType p7 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password"); + DatabaseType p8 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver"); + DatabaseType p9 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url"); + DatabaseType p10 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect"); + + 
parameters.add(p1); + parameters.add(p2); + parameters.add(p3); + parameters.add(p4); + parameters.add(p5); + parameters.add(p6); + parameters.add(p7); + parameters.add(p8); + parameters.add(p9); + parameters.add(p10); + parameters.add(p11); + return parameters; } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSimpleSplitGenerator.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSimpleSplitGenerator.java index 3aa7c01..13a6d1e 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSimpleSplitGenerator.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSimpleSplitGenerator.java @@ -1,6 +1,5 @@ package org.gcube.dataanalysis.ecoengine.processing; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; @@ -17,10 +16,10 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad; import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.interfaces.Generator; +import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; public class LocalSimpleSplitGenerator implements Generator { @@ -328,18 +327,21 @@ public class LocalSimpleSplitGenerator implements Generator { } @Override - public HashMap getInputParameters() { + public List getInputParameters() { return distributionModel.getInputParameters(); } + + @Override - public VARTYPE getContentType() { - return distributionModel.getContentType(); + public StatisticalType getOutput() { + + return distributionModel.getOutput(); } @Override - public Object getContent() { - return distributionModel.getContent(); + public SpatialProbabilityDistribution getAlgorithm() { + return distributionModel; } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSplitGenerator.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSplitGenerator.java index 4366f66..df61439 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSplitGenerator.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSplitGenerator.java @@ -1,6 +1,5 @@ package org.gcube.dataanalysis.ecoengine.processing; -import java.util.HashMap; import java.util.List; import java.util.Properties; import java.util.Queue; @@ -19,11 +18,11 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad; import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.interfaces.Generator; +import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import 
org.gcube.dataanalysis.ecoengine.utils.VarCouple; import org.hibernate.SessionFactory; public class LocalSplitGenerator implements Generator { @@ -497,18 +496,22 @@ public class LocalSplitGenerator implements Generator { } @Override - public HashMap getInputParameters() { + public List getInputParameters() { return distributionModel.getInputParameters(); } + + @Override - public VARTYPE getContentType() { - return distributionModel.getContentType(); + public StatisticalType getOutput() { + + return distributionModel.getOutput(); } @Override - public Object getContent() { - return config.getParam("DistributionTable"); + public SpatialProbabilityDistribution getAlgorithm() { + return distributionModel; } + } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/RainyCloudGenerator.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/RainyCloudGenerator.java index 5f329c6..8e760fc 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/RainyCloudGenerator.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/RainyCloudGenerator.java @@ -1,6 +1,7 @@ package org.gcube.dataanalysis.ecoengine.processing; import java.util.HashMap; +import java.util.List; import org.gcube.contentmanagement.graphtools.utils.HttpRequest; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; @@ -12,11 +13,11 @@ import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecInputObject; import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecOutputObject; import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad; import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources; -import org.gcube.dataanalysis.ecoengine.interfaces.Generator; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; +import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution; -public class RainyCloudGenerator implements Generator { +//deprecated +public class RainyCloudGenerator { AlgorithmConfiguration config; private boolean interruptProcessing; @@ -31,7 +32,6 @@ public class RainyCloudGenerator implements Generator { public RainyCloudGenerator() { } - @Override public float getStatus() { RemoteHspecOutputObject oo = remoteGenerationManager.retrieveCompleteStatus(); @@ -45,7 +45,6 @@ public class RainyCloudGenerator implements Generator { } } - @Override public void init() { AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile); interruptProcessing = false; @@ -89,17 +88,16 @@ public class RainyCloudGenerator implements Generator { remoteGenerationManager = new RemoteGenerationManager(config.getParam("RemoteCalculator")); } - @Override + + public void setConfiguration(AlgorithmConfiguration config) { this.config = config; } - @Override public void shutdown() { interruptProcessing = true; } - @Override public String getResourceLoad() { String returnString = "[]"; @@ -115,7 +113,6 @@ public class RainyCloudGenerator implements Generator { return returnString; } - @Override public String getResources() { Resources res = new Resources(); try { @@ -129,7 +126,6 @@ public class RainyCloudGenerator implements Generator { return "[]"; } - @Override public String getLoad() { RemoteHspecOutputObject rhoo = remoteGenerationManager.retrieveCompleteStatus(); String returnString = "[]"; @@ -140,7 +136,7 @@ public class RainyCloudGenerator implements Generator { 
return returnString; } - @Override + public void generate() throws Exception { try { @@ -163,18 +159,18 @@ public class RainyCloudGenerator implements Generator { AnalysisLogger.getLogger().trace("REMOTE PROCESSING ENDED"); } - @Override + public ALG_PROPS[] getSupportedAlgorithms() { ALG_PROPS[] p = { ALG_PROPS.SPECIES_VS_CSQUARE_REMOTE_FROM_DATABASE }; return p; } - @Override + public INFRASTRUCTURE getInfrastructure() { return INFRASTRUCTURE.D4SCIENCE; } - @Override + /* public HashMap getInputParameters() { HashMap parameters = new HashMap(); parameters.put("RemoteEnvironment", new VarCouple(VARTYPE.INFRA,"")); @@ -191,14 +187,14 @@ public class RainyCloudGenerator implements Generator { return parameters; } - @Override + public VARTYPE getContentType() { return VARTYPE.HSPEC; } - @Override + public Object getContent() { return config.getParam("DistributionTable"); } - +*/ } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/EvaluatorsFactory.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/EvaluatorsFactory.java index 968c9ab..5258f5e 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/EvaluatorsFactory.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/EvaluatorsFactory.java @@ -1,17 +1,11 @@ package org.gcube.dataanalysis.ecoengine.processing.factories; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Properties; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator; -import org.gcube.dataanalysis.ecoengine.interfaces.Model; -import org.gcube.dataanalysis.ecoengine.interfaces.Modeler; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; public class EvaluatorsFactory { @@ -26,8 +20,8 @@ public class EvaluatorsFactory { return evaluators; } - public static HashMap getEvaluatorParameters(String configPath, String algorithmName) throws Exception{ - HashMap inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.evaluatorsFile, algorithmName); + public static List getEvaluatorParameters(String configPath, String algorithmName) throws Exception{ + List inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.evaluatorsFile, algorithmName); return inputs; } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/GeneratorsFactory.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/GeneratorsFactory.java index abd7cad..c8114b4 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/GeneratorsFactory.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/GeneratorsFactory.java @@ -1,7 +1,6 @@ package org.gcube.dataanalysis.ecoengine.processing.factories; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Properties; import java.util.ServiceLoader; @@ -9,10 +8,10 @@ import java.util.ServiceLoader; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import 
org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.interfaces.Generator; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; public class GeneratorsFactory { @@ -69,8 +68,8 @@ public class GeneratorsFactory { - public static HashMap getAlgorithmParameters(String configPath, String algorithmName) throws Exception{ - HashMap inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.algorithmsFile, algorithmName); + public static List getAlgorithmParameters(String configPath, String algorithmName) throws Exception{ + List inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.algorithmsFile, algorithmName); return inputs; } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ModelersFactory.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ModelersFactory.java index ca2f0c8..e156237 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ModelersFactory.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ModelersFactory.java @@ -1,16 +1,15 @@ package org.gcube.dataanalysis.ecoengine.processing.factories; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Properties; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.interfaces.Model; import org.gcube.dataanalysis.ecoengine.interfaces.Modeler; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; public class ModelersFactory { @@ -31,8 +30,8 @@ public class ModelersFactory { } - public static HashMap getModelParameters(String configPath, String algorithmName) throws Exception{ - HashMap inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.modelsFile, algorithmName); + public static List getModelParameters(String configPath, String algorithmName) throws Exception{ + List inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.modelsFile, algorithmName); return inputs; } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ProcessorsFactory.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ProcessorsFactory.java index 4c88323..67a9485 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ProcessorsFactory.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ProcessorsFactory.java @@ -7,17 +7,17 @@ import java.util.Properties; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator; import org.gcube.dataanalysis.ecoengine.interfaces.Generator; import org.gcube.dataanalysis.ecoengine.interfaces.Model; import org.gcube.dataanalysis.ecoengine.interfaces.Modeler; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution; import 
org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; public class ProcessorsFactory { - public static HashMap getDefaultDatabaseConfiguration(String cfgPath) { + public static List getDefaultDatabaseConfiguration(String cfgPath) { String databasecfgfile = cfgPath + AlgorithmConfiguration.defaultConnectionFile; try { return DatabaseFactory.getDefaultDatabaseConfiguration(databasecfgfile); @@ -37,7 +37,7 @@ public class ProcessorsFactory { return algs; } - public static HashMap getParameters(String file, String algorithmName) throws Exception { + public static List getParameters(String file, String algorithmName) throws Exception { Properties p = AlgorithmConfiguration.getProperties(file); String algorithmclass = p.getProperty(algorithmName); diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsNN.java b/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsNN.java index 9fa2e8e..59826bc 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsNN.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsNN.java @@ -3,13 +3,16 @@ package org.gcube.dataanalysis.ecoengine.spatialdistributions; import java.io.FileInputStream; import java.io.IOException; import java.io.ObjectInputStream; -import java.util.HashMap; +import java.util.List; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; +import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; import org.hibernate.SessionFactory; public class AquamapsNN extends AquamapsNative{ @@ -27,24 +30,15 @@ public class AquamapsNN extends AquamapsNative{ } @Override - public HashMap getInputParameters() { - HashMap parameters = new HashMap(); + public List getInputParameters() { + List parameters = super.getInputParameters(); - parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING,"hspen")); - parameters.put("PreprocessedTable", new VarCouple(VARTYPE.STRING,"maxminlat_hspen")); - - parameters.put("SpeciesTable", new VarCouple(VARTYPE.STRING, "hspen")); - parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.STRING, "hcaf_d")); - parameters.put("DistributionTable", new VarCouple(VARTYPE.RANDOM, "hspec_nn_")); - parameters.put("SpeciesName", new VarCouple(VARTYPE.STRING, "")); - parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"true")); - parameters.put("UserName", new VarCouple(VARTYPE.STRING, "")); + PrimitiveType p1 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SpeciesName","Name of the Species for which the distribution has to be produced","Fis-10407"); + ServiceType p2 = new ServiceType(ServiceParameters.USERNAME, "UserName","LDAP username"); + + parameters.add(p1); + parameters.add(p2); - parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, "")); - parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, 
"")); - parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, "")); - parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, "")); - return parameters; } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsNNNS.java b/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsNNNS.java index 7f8b55f..ba04ee4 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsNNNS.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsNNNS.java @@ -3,13 +3,16 @@ package org.gcube.dataanalysis.ecoengine.spatialdistributions; import java.io.FileInputStream; import java.io.IOException; import java.io.ObjectInputStream; -import java.util.HashMap; +import java.util.List; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; +import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions.NeuralNet; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; import org.hibernate.SessionFactory; public class AquamapsNNNS extends AquamapsNative{ @@ -27,24 +30,15 @@ public class AquamapsNNNS extends AquamapsNative{ } @Override - public HashMap getInputParameters() { - HashMap parameters = new HashMap(); + public List getInputParameters() { + List parameters = super.getInputParameters(); - parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING,"hspen")); - parameters.put("PreprocessedTable", new VarCouple(VARTYPE.STRING,"maxminlat_hspen")); - - parameters.put("SpeciesTable", new VarCouple(VARTYPE.STRING, "hspen")); - parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.STRING, "hcaf_d")); - parameters.put("DistributionTable", new VarCouple(VARTYPE.RANDOM, "hspec_nn_")); - parameters.put("SpeciesName", new VarCouple(VARTYPE.STRING, "")); - parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"true")); - parameters.put("UserName", new VarCouple(VARTYPE.STRING, "")); + PrimitiveType p1 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SpeciesName","Name of the Species for which the distribution has to be produced","Fis-10407"); + ServiceType p2 = new ServiceType(ServiceParameters.USERNAME, "UserName","LDAP username"); + + parameters.add(p1); + parameters.add(p2); - parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, "")); - parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, "")); - parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, "")); - parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, "")); - return parameters; } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsNNSuitable.java b/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsNNSuitable.java index b7b0ef2..53ea04c 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsNNSuitable.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsNNSuitable.java @@ 
-3,13 +3,16 @@ package org.gcube.dataanalysis.ecoengine.spatialdistributions; import java.io.FileInputStream; import java.io.IOException; import java.io.ObjectInputStream; -import java.util.HashMap; +import java.util.List; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; +import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType; +import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; import org.hibernate.SessionFactory; public class AquamapsNNSuitable extends AquamapsSuitable{ @@ -27,24 +30,15 @@ public class AquamapsNNSuitable extends AquamapsSuitable{ } @Override - public HashMap getInputParameters() { - HashMap parameters = new HashMap(); + public List getInputParameters() { + List parameters = super.getInputParameters(); - parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING,"hspen")); - parameters.put("PreprocessedTable", new VarCouple(VARTYPE.STRING,"maxminlat_hspen")); - - parameters.put("SpeciesTable", new VarCouple(VARTYPE.STRING, "hspen")); - parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.STRING, "hcaf_d")); - parameters.put("DistributionTable", new VarCouple(VARTYPE.RANDOM, "hspec_nn_")); - parameters.put("SpeciesName", new VarCouple(VARTYPE.STRING, "")); - parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"true")); - parameters.put("UserName", new VarCouple(VARTYPE.STRING, "")); + PrimitiveType p1 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SpeciesName","Name of the Species for which the distribution has to be produced","Fis-10407"); + ServiceType p2 = new ServiceType(ServiceParameters.USERNAME, "UserName","LDAP username"); + + parameters.add(p1); + parameters.add(p2); - parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, "")); - parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, "")); - parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, "")); - parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, "")); - return parameters; } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsSuitable.java b/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsSuitable.java index 2d294f9..31bfa7d 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsSuitable.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/AquamapsSuitable.java @@ -8,10 +8,18 @@ import java.util.Queue; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; +import org.gcube.dataanalysis.ecoengine.datatypes.InputTable; +import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable; +import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; +import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType; +import 
org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; import org.hibernate.SessionFactory; public class AquamapsSuitable implements SpatialProbabilityDistributionTable{ @@ -20,6 +28,7 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{ String csquareCodeQuery = "select csquarecode,depthmean,depthmax,depthmin, sstanmean,sbtanmean,salinitymean,salinitybmean, primprodmean,iceconann,landdist,oceanarea,centerlat,centerlong,faoaream,eezall,lme from %1$s d where oceanarea>0"; String createTableStatement = "CREATE TABLE %1$s ( speciesid character varying, csquarecode character varying, probability real, boundboxyn smallint, faoareayn smallint, faoaream integer, eezall character varying, lme integer) WITH (OIDS=FALSE ); CREATE INDEX CONCURRENTLY %1$s_idx ON %1$s USING btree (speciesid, csquarecode, faoaream, eezall, lme);"; String destinationTable; + String destinationTableLabel; String metainfo ="boundboxyn, faoareayn, faoaream, eezall, lme"; String selectAllSpeciesObservationQuery = "SELECT speciesid,maxclat,minclat from %1$s;"; String hspenMinMaxLat = "maxminlat_hspen"; @@ -36,6 +45,8 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{ csquareCodeQuery = String.format(csquareCodeQuery, config.getParam("CsquarecodesTable")); createTableStatement = String.format(createTableStatement,config.getParam("DistributionTable")); destinationTable = config.getParam("DistributionTable"); + destinationTableLabel = config.getParam("DistributionTableLabel"); + core = new AquamapsAlgorithmCore(); if ((config.getParam("PreprocessedTable")!=null)&&(config.getParam("PreprocessedTable").length()>0)) @@ -204,33 +215,54 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{ } @Override - public HashMap getInputParameters() { - HashMap parameters = new HashMap(); - parameters.put("EnvelopeTable", new VarCouple(VARTYPE.HSPEN,"hspen")); - parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.HCAF,"hcaf_d")); - parameters.put("DistributionTable", new VarCouple(VARTYPE.RANDOM,"hspec_")); - parameters.put("PreprocessedTable", new VarCouple(VARTYPE.MINMAXLAT,"maxminlat_hspen")); - parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"true")); - parameters.put("DatabaseUserName",new VarCouple(VARTYPE.DATABASEUSERNAME,"")); - parameters.put("DatabasePassword",new VarCouple(VARTYPE.DATABASEPASSWORD,"")); - parameters.put("DatabaseURL",new VarCouple(VARTYPE.DATABASEURL,"")); - parameters.put("DatabaseDriver",new VarCouple(VARTYPE.DATABASEDRIVER,"")); + public List getInputParameters() { + List parameters = new ArrayList(); + List templatesMinmax = new ArrayList(); + templatesMinmax.add(TableTemplates.MINMAXLAT); + List templateHspen = new ArrayList(); + templateHspen.add(TableTemplates.HSPEN); + List templateHcaf = new ArrayList(); + templateHcaf.add(TableTemplates.HCAF); + + InputTable p1 = new InputTable(templateHspen,"EnvelopeTable","The previous hspen table 
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/DummyAlgorithm.java b/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/DummyAlgorithm.java
index 63f4f84..836aa33 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/DummyAlgorithm.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/DummyAlgorithm.java
@@ -5,7 +5,6 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.ObjectOutputStream;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
@@ -13,9 +12,10 @@ import java.util.UUID;
 
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
+import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
 import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
-import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
-import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
 
 public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
 
@@ -188,23 +188,19 @@ public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
 
 	@Override
 	public String getDescription() {
-		// TODO Auto-generated method stub
+
+		return "a testing algorithm for statistical service performance tests - calculates a random probability distribution and stores it in a file";
+	}
+
+	@Override
+	public List<StatisticalType> getInputParameters() {
 		return null;
 	}
-
+
 	@Override
-	public HashMap<String, VarCouple> getInputParameters() {
-		return null;
+	public StatisticalType getOutput() {
+		PrimitiveType p = new PrimitiveType(File.class.getName(), new File(filename), PrimitiveTypes.FILE, "DummyDistribution","Dummy Distribution File");
+		return p;
 	}
-
-	@Override
-	public VARTYPE getContentType() {
-		return VARTYPE.FILE;
-	}
-
-	@Override
-	public Object getContent() {
-		return new File(filename);
-	}
-
 }
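DummyAlgorithm now exposes its result through getOutput() instead of the old getContentType()/getContent() pair. A minimal sketch of unwrapping that output; PrimitiveType.getContent() is assumed public because later hunks in this patch call it, and the algorithm is assumed to have already run so its file name is set:

// Minimal sketch: unwrap the typed output of a generic distribution algorithm.
import java.io.File;

import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;

public class DistributionOutputReader {
	public static File outputFileOf(SpatialProbabilityDistributionGeneric algorithm) {
		// getOutput() bundles the type tag and the payload that the removed
		// getContentType()/getContent() methods used to return separately.
		PrimitiveType output = (PrimitiveType) algorithm.getOutput();
		return (File) output.getContent();
	}
}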
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/TestAlgorithm.java b/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/TestAlgorithm.java
index 4bae132..fc63c1e 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/TestAlgorithm.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/spatialdistributions/TestAlgorithm.java
@@ -5,7 +5,6 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.ObjectOutputStream;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
@@ -13,9 +12,10 @@ import java.util.UUID;
 
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
+import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
 import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
-import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
-import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
 
 public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{
 
@@ -136,22 +136,18 @@ public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{
 
 	@Override
 	public String getDescription() {
+		return "A performance test algorithm for the Statistical Manager - generates a constant probability distribution";
+	}
+
+	@Override
+	public List<StatisticalType> getInputParameters() {
 		return null;
 	}
 
 	@Override
-	public HashMap<String, VarCouple> getInputParameters() {
-		return null;
-	}
-
-	@Override
-	public VARTYPE getContentType() {
-		return VARTYPE.FILE;
-	}
-
-	@Override
-	public Object getContent() {
-		return new File(filename);
+	public StatisticalType getOutput() {
+		PrimitiveType p = new PrimitiveType(File.class.getName(), new File(filename), PrimitiveTypes.FILE, "TestDistribution","Test Distribution File");
+		return p;
 	}
 }
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/TestsMetaInfo.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/TestsMetaInfo.java
index 26c5710..68bf934 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/TestsMetaInfo.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/TestsMetaInfo.java
@@ -5,13 +5,13 @@ import java.util.List;
 
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
 import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
 import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
 import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
 import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
 import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
 import org.gcube.dataanalysis.ecoengine.processing.factories.ProcessorsFactory;
-import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
 
 public class TestsMetaInfo {
 /**
@@ -22,7 +22,7 @@ public class TestsMetaInfo {
 	public static void main(String[] args) throws Exception {
 
 		System.out.println("***TEST 1 - Get Algorithm Information***");
-		HashMap<String, VarCouple> map = GeneratorsFactory.getAlgorithmParameters("./cfg/","DUMMY");
+		List<StatisticalType> map = GeneratorsFactory.getAlgorithmParameters("./cfg/","DUMMY");
 		System.out.println("input for DUMMY algorithm: "+map);
 
 		map = GeneratorsFactory.getAlgorithmParameters("./cfg/","AQUAMAPS_SUITABLE");
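With the TestsMetaInfo change above, factory metadata queries return a List<StatisticalType> rather than a HashMap. A minimal sketch of the discovery call, assuming "./cfg/" contains the factory property files as in the tests:

// Minimal sketch: print the declared parameters of one algorithm via the factory.
import java.util.List;

import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;

public class PrintAlgorithmParameters {
	public static void main(String[] args) throws Exception {
		// Same call as in TestsMetaInfo; the list replaces the old name->VarCouple map.
		List<StatisticalType> parameters = GeneratorsFactory.getAlgorithmParameters("./cfg/", "AQUAMAPS_SUITABLE");
		for (StatisticalType parameter : parameters)
			System.out.println(parameter);
	}
}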
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/RegressionComplexGeneration.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/RegressionComplexGeneration.java
index 39f77d0..eff00ac 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/RegressionComplexGeneration.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/RegressionComplexGeneration.java
@@ -17,13 +17,15 @@ public class RegressionComplexGeneration {
 	public static void main(String[] args) throws Exception {
 
 		System.out.println("TEST 1");
+		/*
 		List<Generator> generators = GeneratorsFactory.getGenerators(testConfigRemote());
 		generators.get(0).init();
//		generate(generators.get(0));
 		generators = null;
+		*/
 		System.out.println("TEST 2");
-		generators = GeneratorsFactory.getGenerators(testConfigLocal());
+		List<Generator> generators = GeneratorsFactory.getGenerators(testConfigLocal());
 		generators.get(0).init();
 		generate(generators.get(0));
 		generators = null;
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestEvaluation.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestEvaluation.java
index a715e81..95fc0db 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestEvaluation.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestEvaluation.java
@@ -5,6 +5,7 @@ import java.util.List;
 
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
 import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;
 import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
 import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
@@ -73,8 +74,8 @@ public static void main(String[] args) throws Exception {
 		public void run() {
 			try {
-
-				HashMap<String, String> out = dg.process(config);
+				PrimitiveType output = (PrimitiveType) dg.process(config);
+				HashMap<String, String> out = (HashMap<String, String>)output.getContent();
 				DiscrepancyAnalysis.visualizeResults(out);
 
 			} catch (Exception e) {
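Both evaluation clients in this patch now unwrap a PrimitiveType returned by process(). A minimal sketch of that contract, assuming the payload is the same HashMap of discrepancy indicators that process() used to return directly:

// Minimal sketch: run an evaluator and display its results under the new contract.
import java.util.HashMap;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;

public class DiscrepancyRunner {
	@SuppressWarnings("unchecked")
	public static void runAndShow(Evaluator evaluator, AlgorithmConfiguration config) throws Exception {
		// process() wraps the result map in a typed container instead of returning it raw.
		PrimitiveType output = (PrimitiveType) evaluator.process(config);
		HashMap<String, String> out = (HashMap<String, String>) output.getContent();
		DiscrepancyAnalysis.visualizeResults(out);
	}
}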
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisDev.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisDev.java
new file mode 100644
index 0000000..ab119db
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisDev.java
@@ -0,0 +1,61 @@
+package org.gcube.dataanalysis.ecoengine.test.regressions;
+
+import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis;
+
+public class TestHSPECBioClimateAnalysisDev {
+
+
+	public static void main(String args[]) throws Exception{
+		/*
+		String dburl = "jdbc:postgresql://node49.p.d4science.research-infrastructures.eu/aquamaps";
+		String dbUser = "gcube";
+		String dbPassword = "bilico1980";
+		*/
+
+		String dburl = "jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated";
+		String dbUser = "utente";
+		String dbPassword = "d4science";
+
+		BioClimateAnalysis bioClimate=new BioClimateAnalysis("./cfg/","./",dburl, dbUser, dbPassword, true);
+
+		/*
+		final String [] hspecTables = {
+			"hspec2012_07_05_21_47_13_772",
+			"hspec2012_07_05_21_47_13_801",
+			"hspec2012_07_05_21_47_13_819",
+			"hspec2012_07_05_21_47_13_842",
+			"hspec2012_07_05_21_47_13_860",
+			"hspec2012_07_05_21_47_13_888",
+			"hspec2012_07_05_21_47_13_903",
+			"hspec2012_07_05_21_47_13_917"
+		};
+		*/
+		final String [] hspecTables = {
+			"hspec2012_03_12_12_13_14_610",
+			"hspec2012_03_12_15_07_50_820",
+			"hspec2012_03_12_18_07_21_503",
+			"hspec2012_03_12_23_59_57_744",
+			"hspec2012_03_13_02_50_59_399",
+			"hspec2012_03_13_10_22_31_865"
+		};
+
+		final String [] hspecTableNames = {
+			"T1",
+			"T2",
+			"T3",
+			"T4",
+			"T5",
+			"T6",
+			"T7",
+			"T8"
+		};
+
+
+//		bioClimate.globalEvolutionAnalysis(null, hspecTables, null, hspecTableNames, "probability", "csquare", 0.8f);
+		bioClimate.speciesGeographicEvolutionAnalysis(hspecTables, hspecTableNames,0.8f);
+
+	}
+
+
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisProd.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisProd.java
new file mode 100644
index 0000000..a4bf645
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisProd.java
@@ -0,0 +1,57 @@
+package org.gcube.dataanalysis.ecoengine.test.regressions;
+
+import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis;
+
+public class TestHSPECBioClimateAnalysisProd {
+
+
+	public static void main(String args[]) throws Exception{
+
+		String dburl = "jdbc:postgresql://node49.p.d4science.research-infrastructures.eu/aquamaps";
+		String dbUser = "gcube";
+		String dbPassword = "bilico1980";
+
+		/*
+		String dburl = "jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated";
+		String dbUser = "utente";
+		String dbPassword = "d4science";
+		*/
+
+		BioClimateAnalysis bioClimate=new BioClimateAnalysis("./cfg/","./",dburl, dbUser, dbPassword, true);
+
+
+		final String [] hspecTables = {
+			"hspec2012_07_02_17_14_10_063",
+			"hspec2012_07_05_21_47_13_772",
+			"hspec2012_07_05_21_47_13_801"
+//			"hspec2012_07_05_21_47_13_819",
+//			"hspec2012_07_05_21_47_13_842",
+//			"hspec2012_07_05_21_47_13_860",
+//			"hspec2012_07_05_21_47_13_888",
+//			"hspec2012_07_05_21_47_13_903",
+//			"hspec2012_07_05_21_47_13_917",
+//			"hspec2012_07_06_13_05_11_775"
+		};
+
+		final String [] hspecTableNames = {
+			"HSPEC 2015 Suitable Parabolic",
+			"HSPEC 2018 Suitable Parabolic",
+			"HSPEC 2021 Suitable Parabolic",
+			"HSPEC 2024 Suitable Parabolic",
+			"HSPEC 2027 Suitable Parabolic",
+			"HSPEC 2030 Suitable Parabolic",
+			"HSPEC 2033 Suitable Parabolic",
+			"HSPEC 2036 Suitable Parabolic",
+			"HSPEC 2039 Suitable Parabolic",
+			"HSPEC 2042 Suitable Parabolic",
+		};
+
+
+		bioClimate.globalEvolutionAnalysis(null, hspecTables, null, hspecTableNames, "probability", "csquare", 0.8f);
+//		bioClimate.speciesGeographicEvolutionAnalysis(hspecTables, hspecTableNames,0.8f);
+
+	}
+
+
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/user/EvaluatorT.java b/src/main/java/org/gcube/dataanalysis/ecoengine/user/EvaluatorT.java
index 7db00ff..074df5c 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/user/EvaluatorT.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/user/EvaluatorT.java
@@ -4,6 +4,7 @@ import java.util.HashMap;
 import java.util.List;
 
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
 import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;
 import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
 import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
@@ -16,8 +17,9 @@ public class EvaluatorT implements Runnable{
 	public void run() {
 		try {
-
-			HashMap<String, String> out = dg.process(config);
+
+			PrimitiveType output = (PrimitiveType) dg.process(config);
+			HashMap<String, String> out = (HashMap<String, String>)output.getContent();
 			DiscrepancyAnalysis.visualizeResults(out);
 
 		} catch (Exception e) {
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/utils/DatabaseFactory.java b/src/main/java/org/gcube/dataanalysis/ecoengine/utils/DatabaseFactory.java
index a23d9ee..1e8a8c8 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/utils/DatabaseFactory.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/utils/DatabaseFactory.java
@@ -7,7 +7,7 @@ import java.io.FileInputStream;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.Statement;
-import java.util.HashMap;
+import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
@@ -18,6 +18,9 @@ import org.dom4j.Node;
 import org.dom4j.io.SAXReader;
 import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
 import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
+import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
+import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
+import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
 import org.hibernate.Query;
 import org.hibernate.Session;
 import org.hibernate.SessionFactory;
@@ -35,9 +38,9 @@ public class DatabaseFactory{
 	}
 
-	public static HashMap<String, VarCouple> getDefaultDatabaseConfiguration(String configurationFile) throws Exception {
+	public static List<StatisticalType> getDefaultDatabaseConfiguration(String configurationFile) throws Exception {
 
-		HashMap<String, VarCouple> defaultconfig = new HashMap<String, VarCouple>();
+		List<StatisticalType> defaultconfig = new ArrayList<StatisticalType>();
 
 		// take the configuration file
 		File fl = new File(configurationFile);
@@ -53,15 +56,15 @@ public class DatabaseFactory{
 			Node currentnode = nodesIterator.next();
 			String element = currentnode.valueOf("@name");
 			if (element.equals("connection.driver_class"))
-				defaultconfig.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, currentnode.getText()));
+				defaultconfig.add(new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver",currentnode.getText()));
 			if (element.equals("connection.url"))
-				defaultconfig.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, currentnode.getText()));
+				defaultconfig.add(new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url",currentnode.getText()));
 			if (element.equals("connection.username"))
-				defaultconfig.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, currentnode.getText()));
+				defaultconfig.add(new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name",currentnode.getText()));
 			if (element.equals("connection.password"))
-				defaultconfig.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, currentnode.getText()));
+				defaultconfig.add(new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password",currentnode.getText()));
 			if (element.equals("dialect"))
-				defaultconfig.put("DatabaseDialect", new VarCouple(VARTYPE.DATABASEDIALECT, currentnode.getText()));
+				defaultconfig.add(new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect",currentnode.getText()));
 		}
 
 		return defaultconfig;
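getDefaultDatabaseConfiguration() now yields a list of DatabaseType entries parsed from the hibernate-style XML shown above. A minimal sketch of a caller; the configuration file name is hypothetical:

// Minimal sketch: load and print the default connection settings.
import java.util.List;

import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;

public class PrintDefaultDatabaseConfig {
	public static void main(String[] args) throws Exception {
		// Hypothetical file name; any hibernate-style XML with connection.* property nodes fits.
		List<StatisticalType> defaults = DatabaseFactory.getDefaultDatabaseConfiguration("./cfg/DestinationDBHibernate.cfg.xml");
		for (StatisticalType setting : defaults)
			System.out.println(setting);
	}
}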
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/utils/TrainingSetsGenerator.java b/src/main/java/org/gcube/dataanalysis/ecoengine/utils/TrainingSetsGenerator.java
index cd8c0ca..ad94cf9 100644
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/utils/TrainingSetsGenerator.java
+++ b/src/main/java/org/gcube/dataanalysis/ecoengine/utils/TrainingSetsGenerator.java
@@ -19,20 +19,7 @@ public class TrainingSetsGenerator {
 	float threshold = 0.1f;
 	String configPath = "./cfg/";
 
-	public HashMap<String, VarCouple> getInputParameters() {
-
-		HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
-
-		parameters.put("casesTable", new VarCouple(VARTYPE.STRING, ""));
-		parameters.put("columnKeyName", new VarCouple(VARTYPE.STRING, ""));
-
-		parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, ""));
-		parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, ""));
-		parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
-		parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, ""));
-
-		return parameters;
-	}
+
 
 	private int calculateNumberOfPoints(String table) {
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/utils/VARTYPE.java b/src/main/java/org/gcube/dataanalysis/ecoengine/utils/VARTYPE.java
deleted file mode 100644
index a1a93c2..0000000
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/utils/VARTYPE.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package org.gcube.dataanalysis.ecoengine.utils;
-
-public enum VARTYPE {
-	STRING,
-	EPR_LIST,
-	INFRA,
-	SERVICE,
-	DATABASEUSERNAME,
-	DATABASEPASSWORD,
-	DATABASEURL,
-	DATABASEDRIVER,
-	DATABASEDIALECT,
-	CONSTANT,
-	RANDOM,
-	HSPEN,
-	HCAF,
-	HSPEC,
-	OCCURRENCE,
-	MINMAXLAT,
-	FILE,
-	MAP
-}
diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/utils/VarCouple.java b/src/main/java/org/gcube/dataanalysis/ecoengine/utils/VarCouple.java
deleted file mode 100644
index 0c53cde..0000000
--- a/src/main/java/org/gcube/dataanalysis/ecoengine/utils/VarCouple.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package org.gcube.dataanalysis.ecoengine.utils;
-
-public class VarCouple {
-
-
-	VARTYPE first;
-	private String second;
-
-	public VarCouple(VARTYPE first, String second){
-		this.first = first;
-		this.second = second;
-	}
-
-	public String getSecond(){
-		return second;
-	}
-
-	public String getFirst(){
-		return first.toString();
-	}
-
-	public String toString(){
-		return "("+first+","+second+")";
-	}
-
-}
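The two deletions above complete the migration away from VarCouple/VARTYPE. A minimal sketch showing one old entry next to its typed replacement, using only the DatabaseType constructor seen earlier in this patch:

// Minimal sketch: the deleted untyped API and its typed replacement, side by side.
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;

public class MigrationExample {
	public static void main(String[] args) {
		// Before (deleted API):
		//   parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, ""));
		// After: the parameter name, its role and a description travel in one typed object.
		DatabaseType url = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
		System.out.println(url);
	}
}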