This commit is contained in:
Gianpaolo Coro 2012-06-04 17:46:08 +00:00
parent 7b7b4e94e2
commit d171c45507
17 changed files with 131 additions and 37 deletions

View File

@ -0,0 +1 @@
AQUAMAPS_SUITABLE=org.gcube.dataanalysis.peeng.models.AquamapsSuitableNode

View File

@ -38,11 +38,15 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement
// --- names of the configuration resources loaded by the engine ---
// NOTE(review): these are public *mutable* statics; consider making them
// final if no caller reassigns them.
public static String defaultConnectionFile = "DestinationDBHibernate.cfg.xml";
public static String defaultLoggerFile = "ALog.properties";
public static String algorithmsFile = "algorithms.properties";
// algorithms that can run on single distributed nodes
public static String nodeAlgorithmsFile = "nodealgorithms.properties";
public static String generatorsFile = "generators.properties";
public static String modelsFile = "models.properties";
public static String modelersFile = "modelers.properties";
public static String evaluatorsFile = "evaluators.properties";
// RapidMiner operators descriptor
public static String RapidMinerOperatorsFile = "operators.xml";
// Statistical Manager service coordinates in the gCube IS
public static String StatisticalManagerService = "StatisticalManager";
public static String StatisticalManagerClass = "Services";
// chunking size for table processing — NOTE(review): exact unit (rows?) not
// visible in this fragment; confirm against usages.
public static int chunkSize = 100000;
// resource-monitor refresh interval — units not visible here; confirm.
public static int refreshResourcesTime = 10;
@ -69,6 +73,7 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement
// database table references used by the computation
private String occurrenceCellsTable;
private List<String> featuresTable;
private List<String> preprocessedTables;
// service endpoints available to the computation (exposed via get/setEndpoints)
private List<String> endpoints;
//service and remote
private String remoteCalculatorEndpoint;
@ -79,6 +84,7 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement
//modeling
private String model;
private String generator;
// gCube infrastructure scope for this computation (null until set)
private String gcubeScope;
//other properties
private HashMap<String, String> generalProperties;
@ -181,4 +187,22 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement
AnalysisLogger.getLogger().info("Rapid Miner initialized");
}
/** Returns the gCube infrastructure scope for this configuration (null if never set). */
public String getGcubeScope() {
return gcubeScope;
}
/** Sets the gCube infrastructure scope for this configuration. */
public void setGcubeScope(String gcubeScope) {
this.gcubeScope = gcubeScope;
}
/** Returns the configured service endpoints (null if never set). */
public List<String> getEndpoints() {
return endpoints;
}
/** Sets the service endpoints; the list is stored by reference, not copied. */
public void setEndpoints(List<String> endpoints) {
this.endpoints = endpoints;
}
}

View File

@ -0,0 +1,9 @@
package org.gcube.dataanalysis.ecoengine.configuration;
/**
 * Infrastructure tiers on which a computational agent can run.
 * <p>
 * Declaration order is significant: the generator/modeler factories order
 * agents via {@code getInfrastructure().compareTo(...)} (i.e. by ordinal),
 * so a lower ordinal means the agent is scheduled earlier. Keep
 * RAINY_CLOUD before D4SCIENCE before LOCAL.
 */
public enum INFRASTRUCTURE {
RAINY_CLOUD,
D4SCIENCE,
LOCAL
}

View File

@ -1,13 +0,0 @@
package org.gcube.dataanalysis.ecoengine.configuration;
/**
 * Relative execution weight of a computational agent.
 * <p>
 * Declaration order is significant: agents were ordered via
 * {@code compareTo} on this enum (by ordinal), so VERY_HIGH sorts first
 * and LOWEST last.
 * NOTE(review): this enum appears superseded by {@code INFRASTRUCTURE};
 * confirm no remaining references before deletion.
 */
public enum WEIGHT {
VERY_HIGH,
HIGH,
MEDIUM_HIGH,
MEDIUM,
MEDIUM_LOW,
LOW,
LOWEST
}

View File

@ -2,7 +2,7 @@ package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
@ -18,7 +18,7 @@ public interface ComputationalAgent {
public float getStatus();
//gets the weight of the generator: according to this the generator will be placed in the execution order
public WEIGHT getWeight();
public INFRASTRUCTURE getInfrastructure();
// gets the type of the content inside the generator: String, File, HashMap.
public VARTYPE getContentType();

View File

@ -4,7 +4,7 @@ import java.util.HashMap;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.gcube.dataanalysis.ecoengine.utils.VarCouple;
@ -97,8 +97,8 @@ public abstract class DataAnalysis implements Evaluator{
* The weight of this procedure is the lowest as it runs on local machine
*/
@Override
public WEIGHT getWeight() {
return WEIGHT.LOWEST;
public INFRASTRUCTURE getInfrastructure() {
return INFRASTRUCTURE.LOCAL;
}
/**

View File

@ -2,14 +2,13 @@ package org.gcube.dataanalysis.ecoengine.interfaces;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
public interface Generator extends ComputationalAgent{
public ALG_PROPS[] getSupportedAlgorithms();
//gets the weight of the generator: according to this the generator will be placed in the execution order
public WEIGHT getWeight();
public INFRASTRUCTURE getInfrastructure();
public void init();

View File

@ -2,14 +2,14 @@ package org.gcube.dataanalysis.ecoengine.interfaces;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
public interface Modeler extends ComputationalAgent{
public ALG_PROPS[] getSupportedModels();
//gets the weight of the generator: according to this the generator will be placed in the execution order
public WEIGHT getWeight();
public INFRASTRUCTURE getInfrastructure();
public void setmodel(Model model);

View File

@ -0,0 +1,25 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
/**
 * A spatial probability distribution that can be executed in chunks on a
 * single computation node: one (cell-chunk x species-chunk) portion of the
 * overall calculation per {@link #executeNode} call.
 */
public interface SpatialProbabilityDistributionNode extends SpatialProbabilityDistribution {
// initialization of the distribution model on this node
public void initSingleNode(AlgorithmConfiguration config);
// postprocessing after the whole calculation
public void postProcess();
// gets the internal processing status for the single step calculation
public float getInternalStatus();
// processes one chunk of cells against one chunk of species, reading/writing
// files under pathToFiles and logging to logfile.
// NOTE(review): meaning of the int return value (status code? count?) is not
// visible in this fragment — confirm against an implementation
// (e.g. AquamapsSuitableNode).
public int executeNode(int cellOrdinal, int chunksize, int speciesOrdinal, int speciesChunkSize, String pathToFiles, String logfile);
// one-off setup of the node from the configuration; may fail with any exception
public void setup(AlgorithmConfiguration config) throws Exception;
// cardinality of the species dimension being processed
public int getNumberOfSpecies();
// cardinality of the geographical dimension being processed
public int getNumberOfGeoInfo();
}

View File

@ -4,7 +4,7 @@ import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.utils.VARTYPE;
@ -69,8 +69,8 @@ public class SimpleModeler implements Modeler{
}
@Override
public WEIGHT getWeight() {
return WEIGHT.LOWEST;
public INFRASTRUCTURE getInfrastructure() {
return INFRASTRUCTURE.LOCAL;
}
public VARTYPE getContentType() {

View File

@ -14,7 +14,7 @@ import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
@ -328,8 +328,8 @@ public class LocalSimpleSplitGenerator implements Generator {
}
@Override
public WEIGHT getWeight() {
return WEIGHT.LOWEST;
public INFRASTRUCTURE getInfrastructure() {
return INFRASTRUCTURE.LOCAL;
}
@Override

View File

@ -16,7 +16,7 @@ import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
@ -487,8 +487,8 @@ public class LocalSplitGenerator implements Generator {
}
@Override
public WEIGHT getWeight() {
return WEIGHT.LOWEST;
public INFRASTRUCTURE getInfrastructure() {
return INFRASTRUCTURE.LOCAL;
}
@Override

View File

@ -6,7 +6,7 @@ import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.connectors.RemoteGenerationManager;
import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecInputObject;
import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecOutputObject;
@ -174,8 +174,8 @@ public class RainyCloudGenerator implements Generator {
}
@Override
public WEIGHT getWeight() {
return WEIGHT.HIGH;
public INFRASTRUCTURE getInfrastructure() {
return INFRASTRUCTURE.D4SCIENCE;
}
@Override

View File

@ -141,7 +141,7 @@ public class GeneratorsFactory {
int i=0;
boolean inserted = false;
for (Generator g: generators){
if (g.getWeight().compareTo(generator.getWeight())>0){
if (g.getInfrastructure().compareTo(generator.getInfrastructure())>0){
generators.add(i, generator);
inserted = true;
break;

View File

@ -98,7 +98,7 @@ public static List<Modeler> getModelers(AlgorithmConfiguration config) throws Ex
int i=0;
boolean inserted = false;
for (Modeler g: modelers){
if (g.getWeight().compareTo(mod.getWeight())>0){
if (g.getInfrastructure().compareTo(mod.getInfrastructure())>0){
modelers.add(i, mod);
inserted = true;
break;

View File

@ -0,0 +1,48 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis;
/**
 * Regression driver for {@link BioClimateAnalysis}: runs a species (hspen)
 * evolution analysis of the minimum-salinity feature over a time-ordered
 * sequence of envelope tables.
 *
 * NOTE: this is a main-based integration test; it requires network access
 * to the d4science PostgreSQL instance referenced below.
 */
public class TestBioClimateAnalysis {

	public static void main(String args[]) throws Exception {
		String dburl = "jdbc:postgresql://node49.p.d4science.research-infrastructures.eu/aquamaps";
		String dbUser = "gcube";
		// SECURITY(review): hard-coded database password (CWE-259).
		// Move credentials to an external config file or environment variable
		// before this source is shared further.
		String dbPassword = "bilico1980";

		BioClimateAnalysis bioClimate = new BioClimateAnalysis("./cfg/", "./", dburl, dbUser, dbPassword, true);

		// The table identifiers double as their display names. The original
		// code duplicated this literal array verbatim for the "names"
		// argument; a clone keeps the two arguments identical without the
		// copy-paste drift risk (a distinct array is passed in case the
		// callee mutates its arguments).
		final String[] envelopeTables = {
				"hspen2012_06_01_21_52_47_460",
				"hspen2012_06_01_21_52_47_485",
				"hspen2012_06_01_21_52_47_615",
				"hspen2012_06_01_21_52_46_795",
				"hspen2012_06_02_03_26_13_154",
				"hspen2012_06_02_03_26_16_534",
				"hspen2012_06_02_03_26_43_412",
				"hspen2012_06_02_03_27_26_762",
				"hspen2012_06_02_08_54_48_004",
				"hspen2012_06_02_08_55_53_415"
		};
		final String[] envelopeTablesNames = envelopeTables.clone();

		bioClimate.speciesEvolutionAnalysis(envelopeTables, envelopeTablesNames,
				BioClimateAnalysis.salinityMinFeature, BioClimateAnalysis.salinityDefaultRange);
	}
}

View File

@ -2,6 +2,7 @@ package org.gcube.dataanalysis.ecoengine.utils;
public enum VARTYPE {
STRING,
EPR_LIST,
INFRA,
SERVICE,
DATABASEUSERNAME,