Gianpaolo Coro 2012-08-31 14:17:46 +00:00
parent 9809d3df1e
commit b7b23ad5f9
9 changed files with 390 additions and 18 deletions

View File

@@ -1,2 +1,4 @@
TESTTRANS=org.gcube.dataanalysis.ecoengine.transducers.TestTrans
BIOCLIMATE_HSPEC=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHSPECTransducer
BIOCLIMATE_HCAF=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHCAFTransducer
BIOCLIMATE_HSPEN=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHSPENTransducer
HCAF_INTERPOLATION=org.gcube.dataanalysis.ecoengine.transducers.InterpolationTransducer
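
The new entries register the bioclimate and interpolation transducers under the names passed to config.setAgent() in the regression test below. A minimal sketch of how such a name-to-class registry can be resolved reflectively; the properties file path and the resolver itself are assumptions, not the actual TransducerersFactory internals:

import java.io.FileInputStream;
import java.util.Properties;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;

// Load the agent-name -> class-name mapping and instantiate via reflection.
static Transducerer resolve(String agentName) throws Exception {
	Properties registry = new Properties();
	FileInputStream in = new FileInputStream("cfg/transducerers.properties"); // hypothetical path
	try {
		registry.load(in);
	} finally {
		in.close();
	}
	return (Transducerer) Class.forName(registry.getProperty(agentName)).newInstance();
}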

View File

@@ -10,6 +10,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
@@ -178,7 +179,7 @@ public class DBScan implements Clusterer{
}
//create Table
AnalysisLogger.getLogger().debug("DBScan: Creating table "+OccurrencePointsClusterTable);
String [] features = FeaturesColumnNames.split(",");
String [] features = FeaturesColumnNames.split(AlgorithmConfiguration.getListSeparator());
String columns = "";
for (int i=0;i<features.length;i++){
@@ -378,7 +379,10 @@ public class DBScan implements Clusterer{
templateOccs.add(TableTemplates.GENERIC);
InputTable p1 = new InputTable(templateOccs,"OccurrencePointsTable","Occurrence Points Table","occurrences");
PrimitiveType p2 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FeaturesColumnNames","Column Names for the features comma separated","x,y");
// PrimitiveType p2 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "FeaturesColumnNames","Column Names for the features comma separated","x,y");
PrimitiveTypesList p2 = new PrimitiveTypesList(PrimitiveTypes.STRING, "FeaturesColumnNames","Column Names for the features",false);
ServiceType p3 = new ServiceType(ServiceParameters.RANDOMSTRING, "OccurrencePointsClusterTable","Table name of the distribution","occCluster_");
PrimitiveType p4 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "epsilon","DBScan epsilon parameter","10");
PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "minPoints","DBScan minimum points parameter (identifies outliers)","1");
@@ -396,7 +400,6 @@ public class DBScan implements Clusterer{
parameters.add(p3);
parameters.add(p4);
parameters.add(p5);
parameters.add(p5);
parameters.add(p6);
parameters.add(p7);
parameters.add(p8);
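
The split now uses AlgorithmConfiguration.getListSeparator() instead of a hard-coded comma, matching the switch of FeaturesColumnNames from a comma-separated STRING to a PrimitiveTypesList. A hedged round-trip sketch, assuming the accessor simply returns the engine's list delimiter:

String sep = AlgorithmConfiguration.getListSeparator();
// Build the parameter with the engine's own separator rather than ",".
config.setParam("FeaturesColumnNames", "x" + sep + "y");
// DBScan recovers the individual column names the same way:
String[] features = config.getParam("FeaturesColumnNames").split(sep); // {"x", "y"}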

View File

@@ -5,12 +5,12 @@ import java.util.List;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
public class InputTablesList extends StatisticalType {
public class TablesList extends StatisticalType {
protected List<InputTable> list;
protected List<TableTemplates> templateNames;
public InputTablesList(List<TableTemplates> templateNames, String name, String description, boolean optional) {
public TablesList(List<TableTemplates> templateNames, String name, String description, boolean optional) {
super(name, description, optional);
list = new ArrayList<InputTable>();
this.templateNames=templateNames;
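
Renaming InputTablesList to TablesList reflects that the type now describes output table lists as well; InterpolationTransducer.getOutput() below accumulates OutputTable entries in it. A minimal sketch of that output-side use, assuming the add(...) accessor invoked there:

List<TableTemplates> template = new ArrayList<TableTemplates>();
template.add(TableTemplates.HCAF);
TablesList produced = new TablesList(template, "PRODUCED_TABLES", "tables built by the run", false);
// "hcaf_d_interp_1" is a hypothetical table name used only for illustration.
produced.add(new OutputTable(template, "hcaf_d_interp_1", "hcaf_d_interp_1", "Interpolation point number 1"));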

View File

@@ -3,6 +3,7 @@ package org.gcube.dataanalysis.ecoengine.datatypes.enumtypes;
public enum PrimitiveTypes {
STRING,
NUMBER,
ENUMERATED,
CONSTANT,
RANDOM,
FILE,
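
ENUMERATED backs the new InterpolationFunction parameter declared later in this commit. A short sketch of declaring and reading such a parameter, mirroring the InterpolationTransducer code below:

// Declared with Enum.class and an empty default, as in getInputParameters():
PrimitiveType f = new PrimitiveType(Enum.class.getName(), null, PrimitiveTypes.ENUMERATED,
		"InterpolationFunction", "The interpolation function to use", "");
// At compute time the chosen string is mapped back onto the enum:
INTERPOLATIONFUNCTIONS fun = INTERPOLATIONFUNCTIONS.valueOf(config.getParam("InterpolationFunction"));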

View File

@@ -3,17 +3,12 @@ package org.gcube.dataanalysis.ecoengine.test.regression;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.InterpolateTables.INTERPOLATIONFUNCTIONS;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
public class RegressionTestTransducers {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution of suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
@@ -23,6 +18,20 @@ public static void main(String[] args) throws Exception {
Regressor.process(trans.get(0));
trans = null;
trans = TransducerersFactory.getTransducerers(testConfigLocal2());
trans.get(0).init();
Regressor.process(trans.get(0));
trans = null;
trans = TransducerersFactory.getTransducerers(testConfigLocal3());
trans.get(0).init();
Regressor.process(trans.get(0));
trans = null;
trans = TransducerersFactory.getTransducerers(testConfigLocal4());
trans.get(0).init();
Regressor.process(trans.get(0));
trans = null;
}
@@ -36,4 +45,41 @@ public static void main(String[] args) throws Exception {
return config;
}
private static AlgorithmConfiguration testConfigLocal2() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("BIOCLIMATE_HCAF");
config.setParam("HCAF_TABLE_LIST","hcaf_d"+AlgorithmConfiguration.getListSeparator()+"hcaf_d_2016_linear_01332632269756"+AlgorithmConfiguration.getListSeparator()+"hcaf_d_2016_linear_01336062995861"+AlgorithmConfiguration.getListSeparator()+"hcaf_d_2050");
config.setParam("HCAF_TABLE_NAMES", "test"+AlgorithmConfiguration.getListSeparator()+"test"+AlgorithmConfiguration.getListSeparator()+"test"+AlgorithmConfiguration.getListSeparator()+"test");
return config;
}
private static AlgorithmConfiguration testConfigLocal3() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("BIOCLIMATE_HSPEN");
config.setParam("HSPEN_TABLE_LIST","hspen"+AlgorithmConfiguration.getListSeparator()+"hspen_2016"+AlgorithmConfiguration.getListSeparator()+"hspen_2020"+AlgorithmConfiguration.getListSeparator()+"hspen_2050");
config.setParam("HSPEN_TABLE_NAMES", "test"+AlgorithmConfiguration.getListSeparator()+"test"+AlgorithmConfiguration.getListSeparator()+"test"+AlgorithmConfiguration.getListSeparator()+"test");
return config;
}
private static AlgorithmConfiguration testConfigLocal4() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("HCAF_INTERPOLATION");
config.setParam("FirstHCAF","hcaf_d");
config.setParam("SecondHCAF","hcaf_d_2050");
config.setParam("YearStart","2012");
config.setParam("YearEnd","2050");
config.setParam("NumberOfInterpolations","2");
config.setParam("InterpolationFunction",INTERPOLATIONFUNCTIONS.LINEAR.name());
return config;
}
}
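
The configurations above build their list parameters by repeated string concatenation with AlgorithmConfiguration.getListSeparator(). A small helper, sketched here as an assumption rather than existing API, would keep those setParam calls readable:

// Hypothetical convenience for joining list parameters with the engine separator.
private static String joinList(String... items) {
	StringBuilder sb = new StringBuilder();
	for (int i = 0; i < items.length; i++) {
		if (i > 0) sb.append(AlgorithmConfiguration.getListSeparator());
		sb.append(items[i]);
	}
	return sb.toString();
}
// e.g. config.setParam("HSPEN_TABLE_LIST", joinList("hspen", "hspen_2016", "hspen_2020", "hspen_2050"));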

View File

@@ -0,0 +1,80 @@
package org.gcube.dataanalysis.ecoengine.transducers;
import java.util.ArrayList;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.TablesList;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis;
public class BioClimateHCAFTransducer extends BioClimateHSPECTransducer{
private String[] hcafTables;
private String[] hcafTablesNames;
@Override
public void init() throws Exception {
//init the analyzer
bioClimate=new BioClimateAnalysis(config.getConfigPath(),config.getPersistencePath(),config.getParam("DatabaseURL"),config.getParam("DatabaseUserName"), config.getParam("DatabasePassword"), false);
//build the hspec names:
hcafTables = config.getParam("HCAF_TABLE_LIST").split(AlgorithmConfiguration.getListSeparator());
hcafTablesNames = config.getParam("HCAF_TABLE_NAMES").split(AlgorithmConfiguration.getListSeparator());
}
@Override
public String getDescription() {
return "Evaluates the climatic changes impact on the variation of the ocean features contained in Hcaf tables";
}
@Override
public List<StatisticalType> getInputParameters() {
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
DatabaseType p1 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
DatabaseType p2 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
DatabaseType p3 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
DatabaseType p4 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASETABLESPACE, "DatabaseTableSpace", "db table space");
List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
templateHspec.add(TableTemplates.HCAF);
TablesList p7 = new TablesList(templateHspec, "HCAF_TABLE_LIST", "List of HCAF tables to analyze", false);
PrimitiveTypesList p8 = new PrimitiveTypesList(PrimitiveTypes.STRING, "HCAF_TABLE_NAMES", "List of HCAF table names to be used as labels", false);
parameters.add(p1);
parameters.add(p2);
parameters.add(p3);
parameters.add(p4);
parameters.add(p5);
parameters.add(p6);
parameters.add(p7);
parameters.add(p8);
return parameters;
}
@Override
public void compute() throws Exception {
status = 0.1f;
try{
bioClimate.hcafEvolutionAnalysis(hcafTables, hcafTablesNames);
producedImages=bioClimate.getProducedImages();
}catch(Exception e){
e.printStackTrace();
throw e;
}
finally{
status = 100f;
}
}
}
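
A hedged end-to-end sketch of driving this transducer, following the lifecycle the regression test uses (the List<Transducerer> return type is assumed from the imports shown in that test):

AlgorithmConfiguration config = Regressor.getConfig(); // database settings as in the test
config.setAgent("BIOCLIMATE_HCAF");
String sep = AlgorithmConfiguration.getListSeparator();
config.setParam("HCAF_TABLE_LIST", "hcaf_d" + sep + "hcaf_d_2050");
config.setParam("HCAF_TABLE_NAMES", "current" + sep + "2050");
List<Transducerer> trans = TransducerersFactory.getTransducerers(config);
trans.get(0).init();
Regressor.process(trans.get(0));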

View File

@@ -8,7 +8,7 @@ import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTablesList;
import org.gcube.dataanalysis.ecoengine.datatypes.TablesList;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
@@ -27,7 +27,7 @@ public class BioClimateHSPECTransducer implements Transducerer{
private String[] hspecTables;
private String[] hspecTablesNames;
float status = 0;
protected float status = 0;
@Override
public INFRASTRUCTURE getInfrastructure() {
@@ -38,7 +38,7 @@ public class BioClimateHSPECTransducer implements Transducerer{
@Override
public void init() throws Exception {
//init the analyzer
bioClimate=new BioClimateAnalysis(config.getConfigPath(),"./",config.getParam("DatabaseURL"),config.getParam("DatabaseUserName"), config.getParam("DatabasePassword"), false);
bioClimate=new BioClimateAnalysis(config.getConfigPath(),config.getPersistencePath(),config.getParam("DatabaseURL"),config.getParam("DatabaseUserName"), config.getParam("DatabasePassword"), false);
//build the hspec names:
hspecTables = config.getParam("HSPEC_TABLE_LIST").split(AlgorithmConfiguration.getListSeparator());
hspecTablesNames = config.getParam("HSPEC_TABLE_NAMES").split(AlgorithmConfiguration.getListSeparator());
@@ -81,7 +81,7 @@ public class BioClimateHSPECTransducer implements Transducerer{
List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
templateHspec.add(TableTemplates.HSPEC);
InputTablesList p7 = new InputTablesList(templateHspec, "HSPEC_TABLE_LIST", "List of HSPEC tables to analyze", false);
TablesList p7 = new TablesList(templateHspec, "HSPEC_TABLE_LIST", "List of HSPEC tables to analyze", false);
PrimitiveTypesList p8 = new PrimitiveTypesList(PrimitiveTypes.STRING, "HSPEC_TABLE_NAMES", "List of HSPEC table names to be used as labels", false);
PrimitiveType p9 = new PrimitiveType(Double.class.getName(), null, PrimitiveTypes.NUMBER, "Threshold", "A threshold of probability over which the abundance per species will be calculated","0.5");
@@ -91,7 +91,6 @@ public class BioClimateHSPECTransducer implements Transducerer{
parameters.add(p3);
parameters.add(p4);
parameters.add(p5);
parameters.add(p5);
parameters.add(p6);
parameters.add(p7);
parameters.add(p8);

View File

@@ -0,0 +1,80 @@
package org.gcube.dataanalysis.ecoengine.transducers;
import java.util.ArrayList;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.TablesList;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis;
public class BioClimateHSPENTransducer extends BioClimateHSPECTransducer{
private String[] envelopeTables;
private String[] envelopeTablesNames;
@Override
public void init() throws Exception {
//init the analyzer
bioClimate=new BioClimateAnalysis(config.getConfigPath(),config.getPersistencePath(),config.getParam("DatabaseURL"),config.getParam("DatabaseUserName"), config.getParam("DatabasePassword"), false);
//build the hspec names:
envelopeTables = config.getParam("HSPEN_TABLE_LIST").split(AlgorithmConfiguration.getListSeparator());
envelopeTablesNames = config.getParam("HSPEN_TABLE_NAMES").split(AlgorithmConfiguration.getListSeparator());
}
@Override
public String getDescription() {
return "Evaluates the climatic changes impact on the variation of the salinity values in several ranges of a set of species envelopes";
}
@Override
public List<StatisticalType> getInputParameters() {
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
DatabaseType p1 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
DatabaseType p2 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
DatabaseType p3 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
DatabaseType p4 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASETABLESPACE, "DatabaseTableSpace", "db table space");
List<TableTemplates> templateHspec = new ArrayList<TableTemplates>();
templateHspec.add(TableTemplates.HSPEN);
TablesList p7 = new TablesList(templateHspec, "HSPEN_TABLE_LIST", "List of HSPEN tables containing the species for which the salinity will be analyzed", false);
PrimitiveTypesList p8 = new PrimitiveTypesList(PrimitiveTypes.STRING, "HSPEN_TABLE_NAMES", "List of HSPEN table names to be used as labels", false);
parameters.add(p1);
parameters.add(p2);
parameters.add(p3);
parameters.add(p4);
parameters.add(p5);
parameters.add(p6);
parameters.add(p7);
parameters.add(p8);
return parameters;
}
@Override
public void compute() throws Exception {
status = 0.1f;
try{
bioClimate.speciesEvolutionAnalysis(envelopeTables,envelopeTablesNames, BioClimateAnalysis.salinityMinFeature, BioClimateAnalysis.salinityDefaultRange);
producedImages=bioClimate.getProducedImages();
}catch(Exception e){
e.printStackTrace();
throw e;
}
finally{
status = 100f;
}
}
}

View File

@@ -0,0 +1,161 @@
package org.gcube.dataanalysis.ecoengine.transducers;
import java.awt.Image;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.TablesList;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.DatabaseParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.InterpolateTables;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.InterpolateTables.INTERPOLATIONFUNCTIONS;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
public class InterpolationTransducer implements Transducerer{
protected AlgorithmConfiguration config;
protected InterpolateTables interp;
private String[] producedtables;
protected float status = 0;
@Override
public INFRASTRUCTURE getInfrastructure() {
return INFRASTRUCTURE.LOCAL;
}
@Override
public void init() throws Exception {
interp = new InterpolateTables(config.getConfigPath(), config.getPersistencePath(), config.getParam("DatabaseURL"),config.getParam("DatabaseUserName"), config.getParam("DatabasePassword"));
}
@Override
public void setConfiguration(AlgorithmConfiguration config) {
this.config=config;
}
@Override
public void shutdown() {
}
@Override
public float getStatus() {
if ((status>0)&&(status<100)){
return Math.min(interp.getStatus(),95f);
}
else
return status;
}
@Override
public String getDescription() {
return "Evaluates the climatic changes impact on species presence";
}
@Override
public List<StatisticalType> getInputParameters() {
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
DatabaseType p1 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
DatabaseType p2 = new DatabaseType(DatabaseParameters.DATABASEPASSWORD, "DatabasePassword", "db password");
DatabaseType p3 = new DatabaseType(DatabaseParameters.DATABASEDRIVER, "DatabaseDriver", "db driver");
DatabaseType p4 = new DatabaseType(DatabaseParameters.DATABASEURL, "DatabaseURL", "db url");
DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEDIALECT, "DatabaseDialect", "db dialect");
DatabaseType p6 = new DatabaseType(DatabaseParameters.DATABASETABLESPACE, "DatabaseTableSpace", "db table space");
List<TableTemplates> templates = new ArrayList<TableTemplates>();
templates.add(TableTemplates.HCAF);
InputTable p7 = new InputTable(templates, "FirstHCAF", "HCAF table representing the starting point", "hcaf_d");
InputTable p8 = new InputTable(templates, "SecondHCAF", "HCAF table representing the ending point", "hcaf_d_2050");
PrimitiveType p9 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, "YearStart", "The year associated to the FirstHCAF parameter","2012");
PrimitiveType p10 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, "YearEnd", "The year associated to the SecondHCAF parameter","2050");
PrimitiveType p11 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, "NumberOfInterpolations", "Number of Intermediate Interpolation points","2");
PrimitiveType p12 = new PrimitiveType(Enum.class.getName(), null, PrimitiveTypes.ENUMERATED, "InterpolationFunction", "The interpolation Function to use","");
parameters.add(p1);
parameters.add(p2);
parameters.add(p3);
parameters.add(p4);
parameters.add(p5);
parameters.add(p6);
parameters.add(p7);
parameters.add(p8);
parameters.add(p9);
parameters.add(p10);
parameters.add(p11);
parameters.add(p12);
return parameters;
}
@Override
public StatisticalType getOutput() {
List<TableTemplates> template = new ArrayList<TableTemplates>();
template.add(TableTemplates.HCAF);
TablesList p = new TablesList(template, "INTERPOLATED_HCAF_TABLE_LIST", "List of HCAF tables produced by the interpolation", false);
if ((producedtables!=null) &&(producedtables.length>0)){
int i=1;
for (String table:producedtables){
p.add(new OutputTable(template,table,table,"Interpolation point number "+i));
i++;
}
}
return p;
}
@Override
public void compute() throws Exception {
status = 0.1f;
try{
int nInterpolations = Integer.parseInt(config.getParam("NumberOfInterpolations"))+2;
String interpolationType = config.getParam("InterpolationFunction");
INTERPOLATIONFUNCTIONS fun = INTERPOLATIONFUNCTIONS.valueOf(interpolationType);
int year1 = Integer.parseInt(config.getParam("YearStart"));
int year2 = Integer.parseInt(config.getParam("YearEnd"));
interp.interpolate( config.getParam("FirstHCAF"), config.getParam("SecondHCAF"), nInterpolations, fun, year1, year2);
producedtables = interp.getInterpolatedTables();
}catch(Exception e){
e.printStackTrace();
throw e;
}
finally{
status = 100f;
}
}
ResourceFactory resourceManager;
public String getResourceLoad() {
if (resourceManager==null)
resourceManager = new ResourceFactory();
return resourceManager.getResourceLoad(1);
}
@Override
public String getResources() {
return ResourceFactory.getResources(100f);
}
}
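
compute() adds 2 to NumberOfInterpolations so the two input tables count as endpoints: the test configuration above (YearStart 2012, YearEnd 2050, NumberOfInterpolations 2) therefore yields 4 interpolation points in total. A worked check, under the assumption that InterpolateTables spaces the intermediate years evenly:

int nInterpolations = 2 + 2; // 2 intermediate points + 2 endpoints
int year1 = 2012, year2 = 2050;
for (int i = 0; i < nInterpolations; i++) {
	int year = year1 + Math.round((float) i * (year2 - year1) / (nInterpolations - 1));
	System.out.println("interpolation point " + i + " ~ year " + year);
}
// -> 2012, 2025, 2037, 2050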