Added models and modelers management

Merry Christmas

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine@49603 82a268e6-3cf1-43bd-a215-b396298e98cf
Gianpaolo Coro 2011-12-23 13:21:12 +00:00
parent 68c5972557
commit ed7f25af8b
18 changed files with 405 additions and 103 deletions

cfg/modelers.properties (new file)

@@ -0,0 +1 @@
HSPEN_MODELER=org.gcube.dataanalysis.ecoengine.modeling.SimpleModeler

cfg/models.properties

@@ -1,2 +1 @@
HSPEN=org.gcube.dataanalysis.ecoengine.models.ModelHSPEN
HSPEN_MODELER=org.gcube.dataanalysis.ecoengine.modeling.SimpleModeler
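Taken together, the two property files split responsibilities: models.properties now maps a model name to its Model implementation only, while the new modelers.properties lists the available Modeler implementations. A minimal lookup sketch follows (illustrative code, not part of the commit; it assumes the two files sit under ./cfg/ as in the tests below):

import java.io.FileInputStream;
import java.util.Properties;

public class PropertyLayoutSketch {
    public static void main(String[] args) throws Exception {
        Properties models = new Properties();
        models.load(new FileInputStream("./cfg/models.properties"));
        Properties modelers = new Properties();
        modelers.load(new FileInputStream("./cfg/modelers.properties"));
        // HSPEN -> org.gcube.dataanalysis.ecoengine.models.ModelHSPEN
        System.out.println(models.getProperty("HSPEN"));
        // HSPEN_MODELER -> org.gcube.dataanalysis.ecoengine.modeling.SimpleModeler
        System.out.println(modelers.getProperty("HSPEN_MODELER"));
    }
}

Note that at runtime the pairing is no longer resolved through the HSPEN_MODELER key: ModelersFactory (below) matches a model to a modeler by comparing their ALG_PROPS arrays.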

ALG_PROPS.java

@@ -5,5 +5,6 @@ public enum ALG_PROPS {
SPECIES_VS_CSQUARE_FROM_DATABASE,
SPECIES_VS_CSQUARE_REMOTE_FROM_DATABASE,
SPECIES_VS_CSQUARE,
PHENOMENON_VS_GEOINFO
PHENOMENON_VS_GEOINFO,
SPECIES_ENVELOPES
}
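SPECIES_ENVELOPES is the property that ties the HSPEN model to its modeler: both ModelHSPEN.getProperties() and SimpleModeler.getSupportedModels() return it (see below). A sketch of the compatibility test, mirroring the matching loop that ModelersFactory introduces later in this commit (illustrative code, not part of the commit):

import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;

public class CompatibilitySketch {
    // a modeler suits a model if their ALG_PROPS arrays share at least one element
    public static boolean suits(ALG_PROPS[] modelProps, ALG_PROPS[] supported) {
        for (ALG_PROPS p : modelProps)
            for (ALG_PROPS s : supported)
                if (p == s) return true;
        return false;
    }
}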

AlgorithmConfiguration.java

@@ -32,6 +32,7 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration{
public static String algorithmsFile = "algorithms.properties";
public static String generatorsFile = "generators.properties";
public static String modelsFile = "models.properties";
public static String modelersFile = "modelers.properties";
public static int chunkSize = 100000;
public static int refreshResourcesTime = 10;

WEIGHT.java (renamed from GENERATOR_WEIGHT.java)

@@ -1,6 +1,6 @@
package org.gcube.dataanalysis.ecoengine.configuration;
public enum GENERATOR_WEIGHT {
public enum WEIGHT {
VERY_HIGH,
HIGH,

Generator.java

@@ -4,7 +4,7 @@ import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.GENERATOR_WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
public interface Generator {
@@ -14,7 +14,7 @@ public interface Generator {
public HashMap<String, String> getInputParameters();
//gets the weight of the generator: this determines its position in the execution order
public GENERATOR_WEIGHT getWeight();
public WEIGHT getWeight();
public float getStatus();

Model.java

@@ -1,14 +1,30 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
public interface Model {
//defines the properties of this algorithm
public ALG_PROPS[] getProperties();
//defines the name of this model
public String getName();
//gets the description of the model
public String getDescription();
//gets the input parameters of this model
public HashMap<String, String> getInputParameters();
public float getVersion();
public void setVersion(float version);
public String getName();
public void init(Object Input, Model previousModel, Object Destination);
public void init(AlgorithmConfiguration Input, Model previousModel);
public String getResourceLoad();
@@ -20,9 +36,9 @@ public interface Model {
public String getOutputType();
public void postprocess(Object Input, Model previousModel, Object Destination);
public void postprocess(AlgorithmConfiguration Input, Model previousModel);
public void train(Object Input, Model previousModel, Object Destination);
public void train(AlgorithmConfiguration Input, Model previousModel);
// gets the type of the content inside the model: e.g. Table Model, Vectorial Model etc.
public String getContentType();

Modeler.java

@@ -1,10 +1,25 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
public interface Modeler {
public ALG_PROPS[] getSupportedModels();
//gets the input parameters of this modeler
public HashMap<String, String> getInputParameters();
//gets the weight of the modeler: this determines its position in the execution order
public WEIGHT getWeight();
public void setmodel(Model model);
public void model(Object Input, Model previousModel, Object Destination);
public void model(AlgorithmConfiguration Input, Model previousModel);
public String getResourceLoad();

SimpleModeler.java

@@ -1,5 +1,10 @@
package org.gcube.dataanalysis.ecoengine.modeling;
import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
@@ -7,10 +12,10 @@ public class SimpleModeler implements Modeler{
private Model innermodel;
@Override
public void model(Object Input, Model previousModel, Object Destination) {
innermodel.init(Input, previousModel, Destination);
innermodel.train(Input, previousModel, Destination);
innermodel.postprocess(Input, previousModel, Destination);
public void model(AlgorithmConfiguration Input, Model previousModel) {
innermodel.init(Input, previousModel);
innermodel.train(Input, previousModel);
innermodel.postprocess(Input, previousModel);
}
@Override
@@ -53,6 +58,22 @@ public class SimpleModeler implements Modeler{
innermodel.stop();
}
@Override
public ALG_PROPS[] getSupportedModels() {
ALG_PROPS[] props = {ALG_PROPS.SPECIES_ENVELOPES};
return props;
}
@Override
public HashMap<String, String> getInputParameters() {
return innermodel.getInputParameters();
}
@Override
public WEIGHT getWeight() {
return WEIGHT.LOWEST;
}
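SimpleModeler is a thin wrapper: model() drives the inner model's full lifecycle (init, train, postprocess) against a single AlgorithmConfiguration. A usage sketch (assumes the gcube classes are on the classpath and the configuration carries the parameters listed by ModelHSPEN.getInputParameters() below):

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.modeling.SimpleModeler;
import org.gcube.dataanalysis.ecoengine.models.ModelHSPEN;

public class SimpleModelerSketch {
    public static void main(String[] args) throws Exception {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setConfigPath("./cfg/");
        // ... setParam(...) calls for the database and table parameters ...
        Modeler modeler = new SimpleModeler();
        modeler.setmodel(new ModelHSPEN());
        modeler.model(config, null); // runs init -> train -> postprocess on the inner model
    }
}

TestsHSPENTraining at the end of this commit shows the full parameter set.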

ModelHSPEN.java

@@ -9,6 +9,7 @@ import java.util.concurrent.Executors;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
@@ -61,36 +62,42 @@ public class ModelHSPEN implements Model {
@Override
public String getName() {
return ModelHSPEN.class.getName();
return "HSPEN";
}
@Override
public void init(Object Input, Model previousModel, Object Destination) {
AlgorithmConfiguration config = (AlgorithmConfiguration) Input;
outconfig = (AlgorithmConfiguration) Destination;
defaultDatabaseFile = config.getConfigPath() + defaultDatabaseFile;
public void init(AlgorithmConfiguration setup, Model previousModel) {
outconfig = setup;
defaultDatabaseFile = setup.getConfigPath() + defaultDatabaseFile;
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
AnalysisLogger.setLogger(setup.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
try {
connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, config);
String defaultDatabaseFile = setup.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile;
setup.setDatabaseDriver(setup.getParam("DatabaseDriver"));
setup.setDatabaseUserName(setup.getParam("DatabaseUserName"));
setup.setDatabasePassword(setup.getParam("DatabasePassword"));
setup.setDatabaseURL(setup.getParam("DatabaseURL"));
connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, setup);
} catch (Exception e) {
AnalysisLogger.getLogger().debug(e);
e.printStackTrace();
}
// initialize queries
dynamicAlterQuery = alterQuery.replace("%HSPEN%", outconfig.getParam("EnvelopeTable"));
dynamicDropTable = dropHspenTable.replace("%HSPEN%", outconfig.getParam("EnvelopeTable"));
dynamicCreateTable = createHspenTable.replace("%HSPEN%", outconfig.getParam("EnvelopeTable"));
dynamicPopulateNewHspen = populateNewHspen.replace("%HSPEN_ORIGIN%", config.getParam("EnvelopeTable")).replace("%HSPEN%", outconfig.getParam("EnvelopeTable"));
dynamicSpeciesListQuery = speciesListQuery.replace("%HSPEN%", config.getParam("EnvelopeTable"));
dynamicHspenInformationQuery = hspenListQuery.replace("%HSPEN%", config.getParam("EnvelopeTable"));
currentHCAFTable = config.getParam("CsquarecodesTable");
currentOccurrenceTable = config.getParam("OccurrenceCellsTable");
dynamicAlterQuery = alterQuery.replace("%HSPEN%", outconfig.getParam("OuputEnvelopeTable"));
dynamicDropTable = dropHspenTable.replace("%HSPEN%", outconfig.getParam("OuputEnvelopeTable"));
dynamicCreateTable = createHspenTable.replace("%HSPEN%", outconfig.getParam("OuputEnvelopeTable"));
dynamicPopulateNewHspen = populateNewHspen.replace("%HSPEN_ORIGIN%", outconfig.getParam("EnvelopeTable")).replace("%HSPEN%", outconfig.getParam("OuputEnvelopeTable"));
dynamicSpeciesListQuery = speciesListQuery.replace("%HSPEN%", outconfig.getParam("EnvelopeTable"));
dynamicHspenInformationQuery = hspenListQuery.replace("%HSPEN%", outconfig.getParam("EnvelopeTable"));
currentHCAFTable = outconfig.getParam("CsquarecodesTable");
currentOccurrenceTable = outconfig.getParam("OccurrenceCellsTable");
// Threads
numberOfthreads = config.getNumberOfResources();
numberOfthreads = outconfig.getNumberOfResources();
// interrupt process
interruptProcessing = false;
@@ -172,7 +179,7 @@ public class ModelHSPEN implements Model {
}
@Override
public void train(Object Input, Model previousModel, Object Destination) {
public void train(AlgorithmConfiguration Input, Model previousModel) {
long tstart = System.currentTimeMillis();
// INITIALIZATION
try {
@@ -340,7 +347,7 @@ public class ModelHSPEN implements Model {
}
@Override
public void postprocess(Object Input, Model previousModel, Object Destination) {
public void postprocess(AlgorithmConfiguration Input, Model previousModel) {
}
@@ -405,4 +412,30 @@ public class ModelHSPEN implements Model {
return AlgorithmConfiguration.class.getName();
}
@Override
public ALG_PROPS[] getProperties() {
ALG_PROPS[] props = {ALG_PROPS.SPECIES_ENVELOPES};
return props;
}
@Override
public String getDescription() {
return "Hspen Algorithm";
}
@Override
public HashMap<String, String> getInputParameters() {
HashMap<String, String> parameters = new HashMap<String,String>();
parameters.put("EnvelopeTable", "hspen");
parameters.put("CsquarecodesTable", "hcaf_d");
parameters.put("OccurrenceCellsTable", "occurrencecells");
parameters.put("CreateTable", "true");
parameters.put("OuputEnvelopeTable", "");
parameters.put("DatabaseUserName","");
parameters.put("DatabasePassword","");
parameters.put("DatabaseURL","");
parameters.put("DatabaseDriver","");
return parameters;
}
}

LocalSimpleSplitGenerator.java

@@ -14,7 +14,7 @@ import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.GENERATOR_WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
@@ -326,8 +326,8 @@ public class LocalSimpleSplitGenerator implements Generator {
}
@Override
public GENERATOR_WEIGHT getWeight() {
return GENERATOR_WEIGHT.LOWEST;
public WEIGHT getWeight() {
return WEIGHT.LOWEST;
}
@Override

LocalSplitGenerator.java

@@ -16,7 +16,7 @@ import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.GENERATOR_WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
@@ -469,8 +469,8 @@ public class LocalSplitGenerator implements Generator {
}
@Override
public GENERATOR_WEIGHT getWeight() {
return GENERATOR_WEIGHT.LOWEST;
public WEIGHT getWeight() {
return WEIGHT.LOWEST;
}
@Override

RainyCloudGenerator.java

@@ -6,7 +6,7 @@ import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.GENERATOR_WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.WEIGHT;
import org.gcube.dataanalysis.ecoengine.connectors.RemoteGenerationManager;
import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecInputObject;
import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecOutputObject;
@@ -172,8 +172,8 @@ public class RainyCloudGenerator implements Generator {
}
@Override
public GENERATOR_WEIGHT getWeight() {
return GENERATOR_WEIGHT.HIGH;
public WEIGHT getWeight() {
return WEIGHT.HIGH;
}
@Override

GeneratorsFactory.java

@@ -56,70 +56,25 @@ public class GeneratorsFactory {
public static List<String> getProbabilityDistributionAlgorithms(String configPath) throws Exception{
Properties p = AlgorithmConfiguration.getProperties(configPath + AlgorithmConfiguration.algorithmsFile);
List<String> algs = new ArrayList<String>();
for (Object algName: p.keySet()){
algs.add((String)algName);
}
List<String> algs = ProcessorsFactory.getClasses(configPath + AlgorithmConfiguration.algorithmsFile);
return algs;
}
public static List<String> getAllGenerators(String configPath) throws Exception{
Properties p = AlgorithmConfiguration.getProperties(configPath + AlgorithmConfiguration.generatorsFile);
List<String> gens = new ArrayList<String>();
for (Object genName: p.keySet()){
gens.add((String)genName);
}
List<String> gens = ProcessorsFactory.getClasses(configPath + AlgorithmConfiguration.generatorsFile);
return gens;
}
public static List<String> getModels(String configPath) throws Exception{
Properties p = AlgorithmConfiguration.getProperties(configPath + AlgorithmConfiguration.modelsFile);
List<String> models = new ArrayList<String>();
for (Object modelName: p.keySet()){
models.add((String)modelName);
}
return models;
}
public static HashMap<String,String> getAlgorithmParameters(String configPath, String algorithmName) throws Exception{
Properties p = AlgorithmConfiguration.getProperties(configPath + AlgorithmConfiguration.algorithmsFile);
String algorithmclass = p.getProperty(algorithmName);
Object algclass = Class.forName(algorithmclass).newInstance();
//if the algorithm is a generator itself then take it
if (algclass instanceof Generator){
return ((Generator) algclass).getInputParameters();
}
else
return ((SpatialProbabilityDistribution) algclass).getInputParameters();
public static HashMap<String,String> getAlgorithmParameters(String configPath, String algorithmName) throws Exception{
HashMap<String,String> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.algorithmsFile, algorithmName);
return inputs;
}
public static Generator getGenerator(AlgorithmConfiguration config) throws Exception {
//modify this class in order to take the right generator algorithm
try {
//initialize the logger
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
//take the algorithm
String algorithm = config.getGenerator();
if (algorithm == null) throw new Exception("GENERATOR NOT SPECIFIED");
//take the algorithms list
Properties p = AlgorithmConfiguration.getProperties(config.getConfigPath() + AlgorithmConfiguration.generatorsFile);
String algorithmclass = p.getProperty(algorithm);
Object algclass = Class.forName(algorithmclass).newInstance();
Generator g = (Generator) algclass;
g.setConfiguration(config);
g.init();
return g;
} catch (Exception e) {
e.printStackTrace();
throw e;
}
return (Generator)ProcessorsFactory.getProcessor(config, config.getConfigPath() + AlgorithmConfiguration.generatorsFile);
}

ModelersFactory.java

@@ -1,30 +1,111 @@
package org.gcube.dataanalysis.ecoengine.processing.factories;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
public class ModelersFactory {
public static Modeler getGenerator(AlgorithmConfiguration config) throws Exception {
public static Modeler getModeler(AlgorithmConfiguration config) throws Exception {
Modeler modlr = (Modeler)ProcessorsFactory.getProcessor(config, config.getConfigPath() + AlgorithmConfiguration.modelersFile);
return modlr;
}
public static List<String> getModels(String configPath) throws Exception{
List<String> models = ProcessorsFactory.getClasses(configPath + AlgorithmConfiguration.modelsFile);
return models;
}
public static List<String> getModelers(String configPath) throws Exception{
List<String> modelers = ProcessorsFactory.getClasses(configPath + AlgorithmConfiguration.modelersFile);
return modelers;
}
public static HashMap<String,String> getModelParameters(String configPath, String algorithmName) throws Exception{
HashMap<String,String> inputs = ProcessorsFactory.getParameters(configPath + AlgorithmConfiguration.modelsFile, algorithmName);
return inputs;
}
public static List<Modeler> getModelers(AlgorithmConfiguration config) throws Exception {
//manage modeler weights and match the model against the available modelers
List<Modeler> modelers = new ArrayList<Modeler>();
try {
//initialize the logger
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
//take the algorithm
String algorithm = config.getModel();
//take the models list
Properties p = AlgorithmConfiguration.getProperties(config.getConfigPath() + AlgorithmConfiguration.modelsFile);
String objectclass = p.getProperty(config.getModel() + "_MODELER");
Modeler g = (Modeler) Class.forName(objectclass).newInstance();
String modelclass = p.getProperty(config.getModel());
Model m = (Model) Class.forName(modelclass).newInstance();
g.setmodel(m);
return g;
String algorithmclass = p.getProperty(algorithm);
Object algclass = Class.forName(algorithmclass).newInstance();
//if the configured class is a modeler itself then take it
if (algclass instanceof Modeler){
Modeler g = (Modeler) algclass;
modelers.add(g);
}
else
{
Model mod = (Model) algclass;
//take the model's properties
ALG_PROPS[] algp = mod.getProperties();
//take all modelers
Properties pg = AlgorithmConfiguration.getProperties(config.getConfigPath() + AlgorithmConfiguration.modelersFile);
//check each modeler for compatibility with the model
for (Object modelerName:pg.values()){
Modeler gen = (Modeler)Class.forName((String)modelerName).newInstance();
ALG_PROPS[] supportedAlgs = gen.getSupportedModels();
boolean genSuitable = false;
for (ALG_PROPS prop:algp){
for (ALG_PROPS gprop:supportedAlgs){
if (gprop == prop){
genSuitable = true;
gen.setmodel(mod);
break;
}
}
}
//if a suitable modeler was found then add it at the right place in the list
if (genSuitable){
addModeler2List(modelers,gen);
}
}
}
return modelers;
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
//adds a modeler to the weight-sorted modelers list
public static void addModeler2List(List<Modeler> modelers, Modeler mod){
int i=0;
boolean inserted = false;
for (Modeler g: modelers){
if (g.getWeight().compareTo(mod.getWeight())>0){
modelers.add(i, mod);
inserted = true;
break;
}
i++;
}
if (!inserted)
modelers.add(mod);
}
}
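addModeler2List keeps the list sorted by the WEIGHT enum's declaration order (VERY_HIGH first, LOWEST last), which is the execution order the interface comments describe. A standalone sketch of the same insertion rule (illustrative; the real enum lives in org.gcube.dataanalysis.ecoengine.configuration):

import java.util.ArrayList;
import java.util.List;

public class WeightOrderSketch {
    // mirrors the WEIGHT enum; only the extremes appear in this commit
    enum WEIGHT { VERY_HIGH, HIGH, LOWEST }

    public static void main(String[] args) {
        List<WEIGHT> list = new ArrayList<WEIGHT>();
        for (WEIGHT w : new WEIGHT[] { WEIGHT.LOWEST, WEIGHT.VERY_HIGH, WEIGHT.HIGH }) {
            // insert before the first element whose weight compares greater
            int i = 0;
            boolean inserted = false;
            for (WEIGHT g : list) {
                if (g.compareTo(w) > 0) { list.add(i, w); inserted = true; break; }
                i++;
            }
            if (!inserted) list.add(w);
        }
        System.out.println(list); // [VERY_HIGH, HIGH, LOWEST]
    }
}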

ProcessorsFactory.java (new file)

@@ -0,0 +1,80 @@
package org.gcube.dataanalysis.ecoengine.processing.factories;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
public class ProcessorsFactory {
public static List<String> getClasses(String file) throws Exception{
Properties p = AlgorithmConfiguration.getProperties(file);
List<String> algs = new ArrayList<String>();
for (Object algName: p.keySet()){
algs.add((String)algName);
}
return algs;
}
public static HashMap<String,String> getParameters(String file, String algorithmName) throws Exception{
Properties p = AlgorithmConfiguration.getProperties(file);
String algorithmclass = p.getProperty(algorithmName);
Object algclass = Class.forName(algorithmclass).newInstance();
//if the algorithm is a generator itself then take it
if (algclass instanceof Generator){
return ((Generator) algclass).getInputParameters();
}
else if (algclass instanceof Modeler){
return ((Modeler) algclass).getInputParameters();
}
else if (algclass instanceof Model){
return ((Model) algclass).getInputParameters();
}
else
return ((SpatialProbabilityDistribution) algclass).getInputParameters();
}
public static Object getProcessor(AlgorithmConfiguration config,String file) throws Exception {
//select and initialize the right processor class for this configuration
try {
//initialize the logger
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
//take the algorithm name: generators set it via getGenerator(), modelers via getModel()
String algorithm = config.getGenerator();
if (algorithm == null) algorithm = config.getModel();
if (algorithm == null) throw new Exception("PROCESSOR NOT SPECIFIED");
//take the algorithms list
Properties p = AlgorithmConfiguration.getProperties(file);
String algorithmclass = p.getProperty(algorithm);
Object algclass = Class.forName(algorithmclass).newInstance();
if (algclass instanceof Generator){
Generator g = (Generator) algclass;
g.setConfiguration(config);
g.init();
return g;
}
else if (algclass instanceof Modeler){
Modeler m = (Modeler) algclass;
return m;
}
else return null;
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
}
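ProcessorsFactory now centralizes the property-file plumbing that GeneratorsFactory and ModelersFactory previously duplicated. A quick usage sketch (assumes ./cfg/ contains the property files from this commit):

import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.processing.factories.ProcessorsFactory;

public class ProcessorsFactorySketch {
    public static void main(String[] args) throws Exception {
        // list the trainable models, then inspect the inputs HSPEN expects
        System.out.println(ProcessorsFactory.getClasses("./cfg/" + AlgorithmConfiguration.modelsFile));
        HashMap<String, String> params = ProcessorsFactory.getParameters(
                "./cfg/" + AlgorithmConfiguration.modelsFile, "HSPEN");
        System.out.println(params.keySet());
    }
}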

TestsHSPENTraining.java (new file)

@@ -0,0 +1,90 @@
package org.gcube.dataanalysis.ecoengine.test;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
public class TestsHSPENTraining {
/**
* example of model training on a single machine: the procedure trains the HSPEN model and generates a new envelope table
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Modeler> modelers=ModelersFactory.getModelers(testConfig());
train(modelers.get(0),testConfig());
modelers = null;
}
private static void train(Modeler modeler, AlgorithmConfiguration config) throws Exception {
if (modeler != null) {
TestsHSPENTraining tgs = new TestsHSPENTraining();
ThreadCalculator tc = tgs.new ThreadCalculator(modeler, config);
Thread t = new Thread(tc);
t.start();
while (modeler.getStatus() < 100) {
String resLoad = modeler.getResourceLoad();
String ress = modeler.getResources();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("STATUS: " + modeler.getStatus());
Thread.sleep(1000);
}
System.out.println("FINAL STATUS: " + modeler.getStatus());
} else
AnalysisLogger.getLogger().trace("Modeler Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Modeler dg;
AlgorithmConfiguration config;
public ThreadCalculator(Modeler dg,AlgorithmConfiguration config) {
this.dg = dg;
this.config = config;
}
public void run() {
try {
dg.model(config, null);
} catch (Exception e) {
e.printStackTrace();
}
}
}
private static AlgorithmConfiguration testConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(2);
config.setModel("HSPEN");
config.setParam("OuputEnvelopeTable","hspen_trained");
config.setParam("OccurrenceCellsTable","occurrencecells");
config.setParam("EnvelopeTable","hspen_mini");
config.setParam("CsquarecodesTable", "hcaf_d");
config.setParam("CreateTable","true");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
return config;
}
}

TestsMetaInfo.java

@@ -7,6 +7,7 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
public class TestsMetaInfo {
/**
@@ -34,7 +35,15 @@ public static void main(String[] args) throws Exception {
System.out.println("Gens: "+GeneratorsFactory.getAllGenerators("./cfg/"));
System.out.println("\n***TEST 5 - Get All Models to be trained ***");
System.out.println("Models: "+GeneratorsFactory.getModels("./cfg/"));
System.out.println("Models: "+ModelersFactory.getModels("./cfg/"));
System.out.println("\n***TEST 6 - Get All Modelers ***");
System.out.println("Models: "+ModelersFactory.getModelers("./cfg/"));
System.out.println("\n***TEST 7- Get All Model parameters ***");
map = ModelersFactory.getModelParameters("./cfg/","HSPEN");
System.out.println("input for HSPEN algorithm: "+map);
}