Gianpaolo Coro 2012-01-10 16:56:27 +00:00
parent ed7f25af8b
commit fc765e5a15
21 changed files with 112 additions and 104 deletions

View File

@@ -1,64 +1,17 @@
<?xml version='1.0' encoding='UTF-8'?>
<!--
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
-->
<hibernate-configuration>
<session-factory>
<property name="connection.driver_class">org.postgresql.Driver</property>
<property name="connection.provider_class">
org.hibernate.connection.C3P0ConnectionProvider
</property>
<!-- <property name="connection.url">jdbc:postgresql://geoserver.d4science-ii.research-infrastructures.eu/testPortingGianpaolo</property>-->
<!-- <property name="connection.url">jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu/aquamapsorgdb</property> -->
<!-- <property name="connection.url">jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated</property>-->
<property name="connection.url">jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated</property>
<property name="connection.username">utente</property>
<property name="connection.password">d4science</property>
<!-- <property name="dialect"> org.hibernate.dialect.PostgresPlusDialect</property> -->
<!-- <property name="dialect">org.hibernatespatial.postgis.PostgisDialect</property> -->
<property name="connection.provider_class">org.hibernate.connection.C3P0ConnectionProvider</property>
<property name="connection.url">jdbc:postgresql://localhost/testdb</property>
<property name="connection.username">gcube</property>
<property name="connection.password">d4science2</property>
<property name="dialect">org.hibernatespatial.postgis.PostgisDialect</property>
<property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
<property name="c3p0.idleConnectionTestPeriod">3600</property> <!-- seconds -->
<property name="c3p0.automaticTestTable">connectiontesttable</property>
<!--
thread is the short name for
org.hibernate.context.ThreadLocalSessionContext and let Hibernate
bind the session automatically to the thread
-->
<!-- JDBC connection pool (use the built-in) -->
<!--<property name="connection.pool_size">1</property>-->
<property name="c3p0.timeout">7200</property>
<property name="c3p0.idle_test_period">3600</property>
<property name="c3p0.max_size">10</property>
<property name="c3p0.max_statements">0</property>
<property name="c3p0.min_size">1</property>
<property name="current_session_context_class">thread</property>
<!-- Disable the second-level cache -->
<!--
<property
name="cache.provider_class">org.hibernate.cache.NoCacheProvider</property>
-->
<!--
<property name="hibernate.hbm2ddl.auto">create</property>
-->
<!-- this will show us all sql statements -->
<property name="hibernate.show_sql">false</property>
<!-- mapping files
<mapping resource="it/hcare/seat/report/mappings/HCallsTable.hbm.xml" />
<mapping resource="it/hcare/seat/report/mappings/HCallsDialogo.hbm.xml" />
<mapping resource="it/hcare/seat/report/mappings/HCallsRiconoscimenti.hbm.xml" />
-->
</session-factory>
</hibernate-configuration>

View File

@@ -5,13 +5,14 @@ import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public interface Generator {
public ALG_PROPS[] getSupportedAlgorithms();
//gets the input parameters for this generator
public HashMap<String, String> getInputParameters();
public HashMap<String, VarCouple> getInputParameters();
//gets the weight of the generator, which determines its position in the execution order
public WEIGHT getWeight();

View File

@@ -4,6 +4,7 @@ import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public interface Model {
@@ -18,7 +19,7 @@ public interface Model {
public String getDescription();
//gets the input parameters for this model
public HashMap<String, String> getInputParameters();
public HashMap<String, VarCouple> getInputParameters();
public float getVersion();

View File

@@ -5,6 +5,7 @@ import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public interface Modeler {
@@ -12,7 +13,7 @@ public interface Modeler {
public ALG_PROPS[] getSupportedModels();
//gets the input parameters for this modeler
public HashMap<String, String> getInputParameters();
public HashMap<String, VarCouple> getInputParameters();
//gets the weight of the generator, which determines its position in the execution order
public WEIGHT getWeight();

View File

@@ -3,6 +3,7 @@ package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public interface SpatialProbabilityDistribution {
@@ -16,6 +17,6 @@ public interface SpatialProbabilityDistribution {
public String getDescription();
//gets the input parameters for this distribution
public HashMap<String, String> getInputParameters();
public HashMap<String, VarCouple> getInputParameters();
}

View File

@@ -7,6 +7,7 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class SimpleModeler implements Modeler{
private Model innermodel;
@@ -65,7 +66,7 @@ public class SimpleModeler implements Modeler{
}
@Override
public HashMap<String, String> getInputParameters() {
public HashMap<String, VarCouple> getInputParameters() {
return innermodel.getInputParameters();
}

View File

@@ -17,6 +17,8 @@ import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.AquamapsEnvelopeAlgorithm;
import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.EnvelopeSet;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
import org.hibernate.SessionFactory;
public class ModelHSPEN implements Model {
@@ -424,17 +426,17 @@ public class ModelHSPEN implements Model {
}
@Override
public HashMap<String, String> getInputParameters() {
HashMap<String, String> parameters = new HashMap<String,String>();
parameters.put("EnvelopeTable", "hspen");
parameters.put("CsquarecodesTable", "hcaf_d");
parameters.put("OccurrenceCellsTable", "occurrencecells");
parameters.put("CreateTable", "true");
parameters.put("OuputEnvelopeTable", "");
parameters.put("DatabaseUserName","");
parameters.put("DatabasePassword","");
parameters.put("DatabaseURL","");
parameters.put("DatabaseDriver","");
public HashMap<String, VarCouple> getInputParameters() {
HashMap<String, VarCouple> parameters = new HashMap<String,VarCouple>();
parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING,"hspen"));
parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.STRING,"hcaf_d"));
parameters.put("OccurrenceCellsTable", new VarCouple(VARTYPE.STRING,"occurrencecells"));
parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"true"));
parameters.put("OuputEnvelopeTable", new VarCouple(VARTYPE.RANDOM,"hspen_"));
parameters.put("DatabaseUserName",new VarCouple(VARTYPE.DATABASEINFO,""));
parameters.put("DatabasePassword",new VarCouple(VARTYPE.DATABASEINFO,""));
parameters.put("DatabaseURL",new VarCouple(VARTYPE.DATABASEINFO,""));
parameters.put("DatabaseDriver",new VarCouple(VARTYPE.DATABASEINFO,""));
return parameters;
}
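For illustration only, a minimal sketch (not part of this commit) of how a caller might expand a VARTYPE.RANDOM prefix such as "hspen_" into a concrete output table name; the class and helper names are hypothetical:

import java.util.UUID;

import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;

public class RandomTableNameSketch {
	// hypothetical helper: derives a unique table name from a RANDOM-typed prefix
	public static String expand(VarCouple couple) {
		String suffix = UUID.randomUUID().toString().replaceAll("-", "");
		return couple.getSecond() + suffix; // e.g. "hspen_5f3a..."
	}

	public static void main(String[] args) {
		System.out.println(expand(new VarCouple(VARTYPE.RANDOM, "hspen_")));
	}
}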

View File

@@ -19,6 +19,7 @@ import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class LocalSimpleSplitGenerator implements Generator {
@@ -331,7 +332,7 @@ public class LocalSimpleSplitGenerator implements Generator {
}
@Override
public HashMap<String, String> getInputParameters() {
public HashMap<String, VarCouple> getInputParameters() {
return distributionModel.getInputParameters();
}

View File

@@ -22,6 +22,7 @@ import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
import org.hibernate.SessionFactory;
public class LocalSplitGenerator implements Generator {
@@ -474,7 +475,7 @@ public class LocalSplitGenerator implements Generator {
}
@Override
public HashMap<String, String> getInputParameters() {
public HashMap<String, VarCouple> getInputParameters() {
return distributionModel.getInputParameters();
}

View File

@@ -13,6 +13,8 @@ import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecOutputObject;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class RainyCloudGenerator implements Generator {
@@ -177,17 +179,17 @@ public class RainyCloudGenerator implements Generator {
}
@Override
public HashMap<String, String> getInputParameters() {
HashMap<String, String> parameters = new HashMap<String, String>();
parameters.put("RemoteEnvironment", "");
parameters.put("ServiceUserName", "");
parameters.put("CsquarecodesTable", "");
parameters.put("DatabaseURL", "");
parameters.put("DatabaseUserName", "");
parameters.put("DatabasePassword", "");
parameters.put("DistributionTable", "");
parameters.put("EnvelopeTable", "");
parameters.put("RemoteCalculator", "");
public HashMap<String, VarCouple> getInputParameters() {
HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
parameters.put("RemoteEnvironment", new VarCouple(VARTYPE.STRING,""));
parameters.put("ServiceUserName", new VarCouple(VARTYPE.STRING,""));
parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.STRING,""));
parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEINFO,""));
parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEINFO,""));
parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEINFO,""));
parameters.put("DistributionTable", new VarCouple(VARTYPE.RANDOM,"hspec_rem_"));
parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING,""));
parameters.put("RemoteCalculator", new VarCouple(VARTYPE.STRING,""));
return parameters;
}

View File

@@ -12,6 +12,7 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class GeneratorsFactory {
@@ -68,8 +69,8 @@ public class GeneratorsFactory {
public static HashMap<String,String> getAlgorithmParameters(String configPath, String algorithmName) throws Exception{
HashMap<String,String> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.algorithmsFile, algorithmName);
public static HashMap<String,VarCouple> getAlgorithmParameters(String configPath, String algorithmName) throws Exception{
HashMap<String,VarCouple> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.algorithmsFile, algorithmName);
return inputs;
}

View File

@@ -10,6 +10,7 @@ import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class ModelersFactory {
@@ -30,8 +31,8 @@ public class ModelersFactory {
}
public static HashMap<String,String> getModelParameters(String configPath, String algorithmName) throws Exception{
HashMap<String,String> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.modelsFile, algorithmName);
public static HashMap<String,VarCouple> getModelParameters(String configPath, String algorithmName) throws Exception{
HashMap<String,VarCouple> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.modelsFile, algorithmName);
return inputs;
}

View File

@@ -11,6 +11,7 @@ import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class ProcessorsFactory {
@@ -26,7 +27,7 @@ public class ProcessorsFactory {
}
public static HashMap<String,String> getParameters(String file, String algorithmName) throws Exception{
public static HashMap<String,VarCouple> getParameters(String file, String algorithmName) throws Exception{
Properties p = AlgorithmConfiguration.getProperties(file);
String algorithmclass = p.getProperty(algorithmName);

View File

@@ -10,6 +10,8 @@ import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
import org.hibernate.SessionFactory;
public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
@@ -202,17 +204,17 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
}
@Override
public HashMap<String, String> getInputParameters() {
HashMap<String, String> parameters = new HashMap<String,String>();
parameters.put("EnvelopeTable", "hspen");
parameters.put("CsquarecodesTable", "hcaf_d");
parameters.put("DistributionTable", "hspec_default");
parameters.put("PreprocessedTable", "maxminlat_hspen");
parameters.put("CreateTable", "hspen_minmaxlat");
parameters.put("DatabaseUserName","");
parameters.put("DatabasePassword","");
parameters.put("DatabaseURL","");
parameters.put("DatabaseDriver","");
public HashMap<String, VarCouple> getInputParameters() {
HashMap<String, VarCouple> parameters = new HashMap<String,VarCouple>();
parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING,"hspen"));
parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.STRING,"hcaf_d"));
parameters.put("DistributionTable", new VarCouple(VARTYPE.RANDOM,"hspec_"));
parameters.put("PreprocessedTable", new VarCouple(VARTYPE.STRING,"maxminlat_hspen"));
parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"hspen_minmaxlat"));
parameters.put("DatabaseUserName",new VarCouple(VARTYPE.DATABASEINFO,""));
parameters.put("DatabasePassword",new VarCouple(VARTYPE.DATABASEINFO,""));
parameters.put("DatabaseURL",new VarCouple(VARTYPE.DATABASEINFO,""));
parameters.put("DatabaseDriver",new VarCouple(VARTYPE.DATABASEINFO,""));
return parameters;
}

View File

@@ -13,6 +13,7 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
@@ -189,7 +190,7 @@ public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
}
@Override
public HashMap<String, String> getInputParameters() {
public HashMap<String, VarCouple> getInputParameters() {
return null;
}

View File

@@ -13,6 +13,7 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{
@@ -136,7 +137,7 @@ public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{
}
@Override
public HashMap<String, String> getInputParameters() {
public HashMap<String, VarCouple> getInputParameters() {
return null;
}

View File

@@ -79,10 +79,12 @@ public static void main(String[] args) throws Exception {
config.setParam("CsquarecodesTable", "hcaf_d");
config.setParam("CreateTable","true");
/*
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
*/
return config;
}

View File

@@ -8,6 +8,7 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
public class TestsMetaInfo {
/**
@@ -18,7 +19,7 @@ public class TestsMetaInfo {
public static void main(String[] args) throws Exception {
System.out.println("***TEST 1 - Get Algorithm Information***");
HashMap<String, String> map = GeneratorsFactory.getAlgorithmParameters("./cfg/","DUMMY");
HashMap<String, VarCouple> map = GeneratorsFactory.getAlgorithmParameters("./cfg/","DUMMY");
System.out.println("input for DUMMY algorithm: "+map);
map = GeneratorsFactory.getAlgorithmParameters("./cfg/","AQUAMAPS_SUITABLE");

View File

@@ -59,23 +59,23 @@ public class DatabaseFactory{
Node currentnode = nodesIterator.next();
String element = currentnode.valueOf("@name");
if (element.equals("connection.driver_class"))
if (config.getDatabaseDriver() != null){
if ((config.getDatabaseDriver() != null)&&(config.getDatabaseDriver().length()>0)){
currentnode.setText(config.getDatabaseDriver());
}
if (element.equals("connection.url")) {
if (config.getDatabaseURL() != null)
if ((config.getDatabaseURL() != null)&&(config.getDatabaseURL().length()>0))
currentnode.setText(config.getDatabaseURL());
}
if (element.equals("connection.username")) {
if (config.getDatabaseUserName() != null)
if ((config.getDatabaseUserName() != null)&&(config.getDatabaseUserName().length()>0))
currentnode.setText(config.getDatabaseUserName());
}
if (element.equals("connection.password")) {
if (config.getDatabasePassword() != null)
if ((config.getDatabasePassword() != null)&&(config.getDatabasePassword().length()>0))
currentnode.setText(config.getDatabasePassword());
}
if (element.equals("dialect")) {
if (config.getDatabaseDialect() != null)
if ((config.getDatabaseDialect() != null)&&(config.getDatabaseDialect().length()>0))
currentnode.setText(config.getDatabaseDialect());
}
if (element.equals("c3p0.idleConnectionTestPeriod")) {

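The five guards above repeat the same null-and-empty test, so that blank values in an AlgorithmConfiguration no longer overwrite the defaults from the hibernate XML file; a possible refactoring sketch (not part of this commit, names hypothetical):

public class ConfigGuardSketch {
	// treats null or empty configuration values as "not set"
	static boolean isSet(String value) {
		return (value != null) && (value.length() > 0);
	}

	public static void main(String[] args) {
		System.out.println(isSet(""));  // false: an empty value keeps the XML default
		System.out.println(isSet("jdbc:postgresql://localhost/testdb")); // true
	}
}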
View File

@@ -0,0 +1,8 @@
package org.gcube.dataanalysis.ecoengine.utils;
// parameter type tags; the semantics noted below are inferred from their usage in this commit
public enum VARTYPE {
	STRING, // plain string value (a default may be supplied)
	DATABASEINFO, // value filled in from the database connection settings
	CONSTANT, // fixed, predefined value
	RANDOM // name prefix to be completed with a generated suffix
}

View File

@@ -0,0 +1,26 @@
package org.gcube.dataanalysis.ecoengine.utils;
public class VarCouple {

	// a (type, value) pair describing a single algorithm input parameter
	private VARTYPE first;
	private String second;

	public VarCouple(VARTYPE first, String second) {
		this.first = first;
		this.second = second;
	}

	public String getSecond() {
		return second;
	}

	public String getFirst() {
		return first.toString();
	}

	@Override
	public String toString() {
		return "(" + first + "," + second + ")";
	}
}
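For illustration only, a minimal sketch (not part of this commit) of how client code could inspect the typed parameter map now returned by getInputParameters(); the class name is hypothetical:

import java.util.HashMap;
import java.util.Map;

import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;

public class ParameterInspectionSketch {
	public static void main(String[] args) {
		HashMap<String, VarCouple> parameters = new HashMap<String, VarCouple>();
		parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING, "hspen"));
		parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEINFO, ""));
		parameters.put("DistributionTable", new VarCouple(VARTYPE.RANDOM, "hspec_"));

		for (Map.Entry<String, VarCouple> e : parameters.entrySet()) {
			// getFirst() returns the VARTYPE name, getSecond() the default value or prefix
			System.out.println(e.getKey() + " -> type " + e.getValue().getFirst()
					+ ", default '" + e.getValue().getSecond() + "'");
		}
	}
}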