Gianpaolo Coro 2012-11-30 17:55:01 +00:00
parent ad3ce33d46
commit 8441ebc381
33 changed files with 226 additions and 764 deletions
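The common thread across these files is that the processing factories (generators, modelers, evaluators, clusterers, transducerers) now hand back the generic `ComputationalAgent` interface instead of their concrete agent types, so every caller drives an agent through the same `init()` / `compute()` / `getStatus()` lifecycle. A minimal sketch of that calling pattern, assuming the ecoengine classes are on the classpath; the `DUMMY` model and the configuration values are placeholders borrowed from the test classes touched by this commit:

```java
// Minimal sketch (not part of the commit) of the calling pattern the diff
// converges on: whichever factory built the agent, the caller only sees the
// generic ComputationalAgent lifecycle. Configuration values are placeholders.
import java.util.List;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;

public class ComputationalAgentUsageSketch {

    public static void main(String[] args) throws Exception {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setNumberOfResources(2);
        config.setModel("DUMMY"); // placeholder algorithm, as in TestsDUMMYGeneration

        List<ComputationalAgent> agents = GeneratorsFactory.getGenerators(config);
        final ComputationalAgent agent = agents.get(0);
        agent.init();

        // run the computation in a background thread and poll its progress,
        // mirroring the ThreadCalculator pattern used by the regression tests
        Thread worker = new Thread(new Runnable() {
            public void run() {
                try {
                    agent.compute();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
        worker.start();

        while (agent.getStatus() < 100) {
            System.out.println("LOAD: " + agent.getResourceLoad());
            System.out.println("RESOURCES: " + agent.getResources());
            System.out.println("STATUS: " + agent.getStatus());
            Thread.sleep(1000);
        }
    }
}
```

The same loop works unchanged whichever factory produced the agent, which is what lets the regression tests below replace their `Generator`, `Modeler`, `Evaluator`, `Clusterer` and `Transducerer` variables with `ComputationalAgent`.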

View File

@ -12,6 +12,7 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.Hspen;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
@ -71,7 +72,7 @@ public class BioClimateAnalysis {
private double[] avgSST;
private double[] avgSalinity;
private Evaluator eval;
private ComputationalAgent eval;
private float status;
private boolean liveRender;

View File

@ -6,6 +6,8 @@ import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
public class ClusterersFactory {
@ -29,9 +31,10 @@ public class ClusterersFactory {
return input;
}
public static List<Clusterer> getClusterers(AlgorithmConfiguration config) throws Exception {
List<Clusterer> clusterers = new ArrayList<Clusterer>();
public static List<ComputationalAgent> getClusterers(AlgorithmConfiguration config) throws Exception {
List<ComputationalAgent> clusterers = new ArrayList<ComputationalAgent>();
clusterers.add(getClusterer(config));
ProcessorsFactory.addAgent2List(clusterers,GeneratorsFactory.getGenerator(config));
return clusterers;
}

View File

@ -5,7 +5,9 @@ import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
public class EvaluatorsFactory {
@ -30,9 +32,10 @@ public class EvaluatorsFactory {
return input;
}
public static List<Evaluator> getEvaluators(AlgorithmConfiguration config) throws Exception {
List<Evaluator> evaluators = new ArrayList<Evaluator>();
public static List<ComputationalAgent> getEvaluators(AlgorithmConfiguration config) throws Exception {
List<ComputationalAgent> evaluators = new ArrayList<ComputationalAgent>();
evaluators.add(getEvaluator(config));
ProcessorsFactory.addAgent2List(evaluators,GeneratorsFactory.getGenerator(config));
return evaluators;
}

View File

@ -9,6 +9,7 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
@ -81,10 +82,10 @@ public class GeneratorsFactory {
}
public static List<Generator> getGenerators(AlgorithmConfiguration config) throws Exception {
public static List<ComputationalAgent> getGenerators(AlgorithmConfiguration config) throws Exception {
//modify this class in order to manage generators weight and match algorithm vs generators
List<Generator> generators = new ArrayList<Generator>();
List<ComputationalAgent> generators = new ArrayList<ComputationalAgent>();
try {
//initialize the logger
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
@ -140,10 +141,12 @@ public class GeneratorsFactory {
}
//adds a generator to a sorted generators list
public static void addGenerator2List(List<Generator> generators, Generator generator){
public static void addGenerator2List(List<ComputationalAgent> generators, Generator generator){
if (generator == null)
return;
int i=0;
boolean inserted = false;
for (Generator g: generators){
for (ComputationalAgent g: generators){
if (g.getInfrastructure().compareTo(generator.getInfrastructure())>0){
generators.add(i, generator);
inserted = true;
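`addGenerator2List` (and the new `ProcessorsFactory.addAgent2List` shown further down) keep the agent list ordered by a simple insertion rule: walk the list and insert the new element before the first entry whose infrastructure compares greater, otherwise append it at the end. A standalone sketch of that rule, using a plain `Comparable` key as a stand-in for the project's infrastructure enum (the strings below are illustrative only):

```java
// Standalone illustration (hypothetical types) of the insert-into-sorted-list
// pattern used by addGenerator2List/addAgent2List: elements are kept ordered
// by a Comparable key, here a String standing in for getInfrastructure().
import java.util.ArrayList;
import java.util.List;

public class SortedInsertSketch {

    static <T extends Comparable<T>> void addToSortedList(List<T> list, T element) {
        if (element == null)
            return;
        int i = 0;
        boolean inserted = false;
        for (T current : list) {
            if (current.compareTo(element) > 0) {
                list.add(i, element);   // insert before the first larger element
                inserted = true;
                break;
            }
            i++;
        }
        if (!inserted)
            list.add(element);          // element ranks last: append
    }

    public static void main(String[] args) {
        List<String> keys = new ArrayList<String>();
        addToSortedList(keys, "LOCAL");
        addToSortedList(keys, "D4SCIENCE");
        addToSortedList(keys, "CLOUD");
        System.out.println(keys); // [CLOUD, D4SCIENCE, LOCAL]
    }
}
```

Because the comparison is done on `getInfrastructure()`, the agent whose infrastructure sorts first stays at the head of the list, which is the element the tests pick with `get(0)`.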

View File

@ -8,6 +8,7 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
@ -40,10 +41,10 @@ public class ModelersFactory {
return input;
}
public static List<Modeler> getModelers(AlgorithmConfiguration config) throws Exception {
public static List<ComputationalAgent> getModelers(AlgorithmConfiguration config) throws Exception {
//modify this class in order to manage generators weight and match algorithm vs generators
List<Modeler> modelers = new ArrayList<Modeler>();
List<ComputationalAgent> modelers = new ArrayList<ComputationalAgent>();
try {
//initialize the logger
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
@ -99,10 +100,10 @@ public static List<Modeler> getModelers(AlgorithmConfiguration config) throws Ex
}
//adds a modeler to a sorted modelers list
public static void addModeler2List(List<Modeler> modelers, Modeler mod){
public static void addModeler2List(List<ComputationalAgent> modelers, Modeler mod){
int i=0;
boolean inserted = false;
for (Modeler g: modelers){
for (ComputationalAgent g: modelers){
if (g.getInfrastructure().compareTo(mod.getInfrastructure())>0){
modelers.add(i, mod);
inserted = true;

View File

@ -1,5 +1,6 @@
package org.gcube.dataanalysis.ecoengine.processing.factories;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -9,11 +10,12 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.interfaces.GenericAlgorithm;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
@ -94,18 +96,25 @@ public class ProcessorsFactory {
}
public static Object getProcessor(AlgorithmConfiguration config, String file) throws Exception {
public static ComputationalAgent getProcessor(AlgorithmConfiguration config, String file) throws Exception {
return getProcessor(config, file,null);
}
public static ComputationalAgent getProcessor(AlgorithmConfiguration config, String file,String explicitAlgorithm) throws Exception {
// modify this class in order to take the right generator algorithm
try {
// initialize the logger
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// take the algorithm
String algorithm = config.getAgent();
String algorithm = explicitAlgorithm;
if (explicitAlgorithm==null)
algorithm = config.getAgent();
if (algorithm == null)
throw new Exception("PROCESSOR NOT SPECIFIED");
// take the algorithms list
Properties p = AlgorithmConfiguration.getProperties(file);
String algorithmclass = p.getProperty(algorithm);
if (algorithmclass==null)
return null;
Object algclass = Class.forName(algorithmclass).newInstance();
if (algclass instanceof Generator) {
Generator g = (Generator) algclass;
@ -153,4 +162,21 @@ public class ProcessorsFactory {
return map;
}
//adds an agent to a sorted agents list, ordered by infrastructure
public static void addAgent2List(List<ComputationalAgent> agents, ComputationalAgent agent){
if (agent == null)
return;
int i=0;
boolean inserted = false;
for (ComputationalAgent g: agents){
if (g.getInfrastructure().compareTo(agent.getInfrastructure())>0){
agents.add(i, agent);
inserted = true;
break;
}
i++;
}
if (!inserted)
agents.add(agent);
}
}
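The new `getProcessor(config, file, explicitAlgorithm)` overload lets a caller name the algorithm directly instead of relying on `config.getAgent()`, and `addAgent2List` is the shared helper the other factories now delegate to. A hedged usage sketch; the properties-file name and the `DUMMY` key are placeholders for whatever algorithm list the caller already passes to `getProcessor`:

```java
// Hedged usage sketch for the new ProcessorsFactory entry points; the file
// name "algorithms.properties" and the DUMMY key are placeholders.
import java.util.ArrayList;
import java.util.List;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.ProcessorsFactory;

public class ProcessorsFactorySketch {

    public static void main(String[] args) throws Exception {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");

        // the third argument overrides config.getAgent(); passing null keeps
        // the old behaviour of reading the algorithm name from the configuration
        ComputationalAgent agent = ProcessorsFactory.getProcessor(config,
                config.getConfigPath() + "algorithms.properties", "DUMMY");

        // agents from different factories are collected into one list,
        // kept ordered by infrastructure through addAgent2List
        List<ComputationalAgent> agents = new ArrayList<ComputationalAgent>();
        ProcessorsFactory.addAgent2List(agents, agent);
    }
}
```

The added `if (algorithmclass==null) return null;` turns a miss in the properties file into a soft failure, so a caller can fall back to another algorithm list instead of catching an exception.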

View File

@ -5,7 +5,7 @@ import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
public class TransducerersFactory {
@ -30,10 +30,12 @@ public class TransducerersFactory {
return input;
}
public static List<Transducerer> getTransducerers(AlgorithmConfiguration config) throws Exception {
List<Transducerer> clusterers = new ArrayList<Transducerer>();
clusterers.add(getTransducerer(config));
return clusterers;
public static List<ComputationalAgent> getTransducerers(AlgorithmConfiguration config) throws Exception {
List<ComputationalAgent> trans = new ArrayList<ComputationalAgent>();
trans.add(getTransducerer(config));
ProcessorsFactory.addAgent2List(trans,GeneratorsFactory.getGenerator(config));
return trans;
}
}
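Since `getTransducerers` now returns `List<ComputationalAgent>` and merges in whatever `GeneratorsFactory.getGenerator(config)` resolves, the returned list can mix agent kinds; a caller that still needs the concrete `Transducerer` interface has to narrow the type itself. A small sketch of that narrowing, with the configuration left to the caller as in the regression tests of this commit:

```java
// Sketch: the list returned by TransducerersFactory.getTransducerers(config)
// carries ComputationalAgent elements and may also contain a generator merged
// in by ProcessorsFactory.addAgent2List, so callers narrow the type explicitly.
import java.util.List;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;

public class TransducererSelectionSketch {

    // first element that really is a Transducerer, or null if none was built
    static Transducerer firstTransducerer(List<ComputationalAgent> agents) {
        for (ComputationalAgent agent : agents) {
            if (agent instanceof Transducerer)
                return (Transducerer) agent;
        }
        return null;
    }

    // config is assumed to carry the agent name and its parameters, as set by
    // the test configurations elsewhere in this commit
    static void run(AlgorithmConfiguration config) throws Exception {
        Transducerer trans = firstTransducerer(TransducerersFactory.getTransducerers(config));
        if (trans != null) {
            trans.init();
            trans.compute();
        }
    }
}
```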

View File

@ -6,6 +6,7 @@ import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
@ -64,7 +65,7 @@ public static void main(String[] args) throws Exception {
System.out.println("Database Default Values: "+map);
System.out.println("\n***TEST 11- Get Evaluators with a config***");
List<Evaluator> eval = EvaluatorsFactory.getEvaluators(testConfigEvaluator());
List<ComputationalAgent> eval = EvaluatorsFactory.getEvaluators(testConfigEvaluator());
System.out.println("Database Default Values: "+eval);
System.out.println("\n***TEST 12- Get All Supported features***");

View File

@ -5,6 +5,7 @@ import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
@ -25,7 +26,7 @@ public static void main(String[] args) throws Exception {
*/
System.out.println("TEST 2");
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigLocal());
List<ComputationalAgent> generators = GeneratorsFactory.getGenerators(testConfigLocal());
generators.get(0).init();
generate(generators.get(0));
generators = null;
@ -33,7 +34,7 @@ public static void main(String[] args) throws Exception {
}
private static void generate(Generator generator) throws Exception {
private static void generate(ComputationalAgent generator) throws Exception {
if (generator != null) {
RegressionComplexGeneration tgs = new RegressionComplexGeneration();
@ -44,10 +45,10 @@ public static void main(String[] args) throws Exception {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
// String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
// System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
@ -57,9 +58,9 @@ public static void main(String[] args) throws Exception {
}
public class ThreadCalculator implements Runnable {
Generator dg;
ComputationalAgent dg;
public ThreadCalculator(Generator dg) {
public ThreadCalculator(ComputationalAgent dg) {
this.dg = dg;
}

View File

@ -4,6 +4,7 @@ import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
@ -16,7 +17,7 @@ public class RegressionSimpleGeneration {
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Generator> generators = GeneratorsFactory.getGenerators(testConfig1());
List<ComputationalAgent> generators = GeneratorsFactory.getGenerators(testConfig1());
generators.get(0).init();
generate(generators.get(0));
generators = null;
@ -30,7 +31,7 @@ public static void main(String[] args) throws Exception {
}
private static void generate(Generator generator) throws Exception {
private static void generate(ComputationalAgent generator) throws Exception {
if (generator != null) {
RegressionSimpleGeneration tgs = new RegressionSimpleGeneration();
@ -41,10 +42,10 @@ public static void main(String[] args) throws Exception {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
// String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
// System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
@ -54,9 +55,9 @@ public static void main(String[] args) throws Exception {
}
public class ThreadCalculator implements Runnable {
Generator dg;
ComputationalAgent dg;
public ThreadCalculator(Generator dg) {
public ThreadCalculator(ComputationalAgent dg) {
this.dg = dg;
}

View File

@ -9,6 +9,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
@ -21,14 +22,14 @@ public class TestBayesianModels {
*
*/
public static void main1(String[] args) throws Exception {
public static void main(String[] args) throws Exception {
/*
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigLocal1());
generators.get(0).init();
Regressor.process(generators.get(0));
generators = null;
*/
List<Modeler> generators = ModelersFactory.getModelers(testConfigLocal1());
List<ComputationalAgent> generators = ModelersFactory.getModelers(testConfigLocal1());
generators.get(0).init();
Regressor.process(generators.get(0));
generators = null;
@ -36,9 +37,9 @@ public static void main1(String[] args) throws Exception {
}
public static void main(String[] args) throws Exception {
public static void main1(String[] args) throws Exception {
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigLocal2());
List<ComputationalAgent> generators = GeneratorsFactory.getGenerators(testConfigLocal2());
generators.get(0).init();
Regressor.process(generators.get(0));
generators = null;

View File

@ -9,6 +9,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
@ -21,14 +22,14 @@ public class TestBayesianModelsDBTest {
*
*/
public static void main1(String[] args) throws Exception {
public static void main(String[] args) throws Exception {
/*
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigLocal1());
generators.get(0).init();
Regressor.process(generators.get(0));
generators = null;
*/
List<Modeler> generators = ModelersFactory.getModelers(testConfigLocal1());
List<ComputationalAgent> generators = ModelersFactory.getModelers(testConfigLocal1());
generators.get(0).init();
Regressor.process(generators.get(0));
generators = null;
@ -36,9 +37,9 @@ public static void main1(String[] args) throws Exception {
}
public static void main(String[] args) throws Exception {
public static void main1(String[] args) throws Exception {
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigLocal2());
List<ComputationalAgent> generators = GeneratorsFactory.getGenerators(testConfigLocal2());
generators.get(0).init();
Regressor.process(generators.get(0));
generators = null;

View File

@ -6,6 +6,7 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.clustering.DBScan;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory;
@ -18,14 +19,14 @@ public class TestClusterer {
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Clusterer> clus = ClusterersFactory.getClusterers(testConfigLocal2());
List<ComputationalAgent> clus = ClusterersFactory.getClusterers(testConfigLocal2());
clus.get(0).init();
cluster(clus.get(0));
clus = null;
}
private static void cluster(Clusterer clus) throws Exception {
private static void cluster(ComputationalAgent clus) throws Exception {
if (clus != null) {
TestClusterer tgs = new TestClusterer();
@ -43,9 +44,9 @@ public class TestClusterer {
}
public class ThreadCalculator implements Runnable {
Clusterer dg;
ComputationalAgent dg;
public ThreadCalculator(Clusterer dg) {
public ThreadCalculator(ComputationalAgent dg) {
this.dg = dg;
}

View File

@ -10,6 +10,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.InterpolateTables.INTERPOLATIONFUNCTIONS;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
@ -57,7 +58,7 @@ public static void main(String[] args) throws Exception {
*/
// List<Evaluator> trans = null;
// trans = EvaluatorsFactory.getEvaluators(testConfigLocal12());
List<Transducerer> trans = TransducerersFactory.getTransducerers(testConfigLocal5b());
List<ComputationalAgent> trans = TransducerersFactory.getTransducerers(testConfigLocal5b());
trans.get(0).init();
Regressor.process(trans.get(0));
@ -166,10 +167,10 @@ public static void main(String[] args) throws Exception {
config.setParam("scientificNameColumn", "scientificname");
config.setParam("eventDateColumn", "eventdate");
config.setParam("lastModificationColumn", "modified");
/*
config.setParam("rightTableName", "occurrence_species2");
config.setParam("leftTableName", "occurrence_species1");
*/
/*
config.setParam("rightTableName", "occurrence_species_id1e8f7b48_b99a_48a3_8b52_89976fd79cd4");
config.setParam("leftTableName", "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
@ -177,8 +178,8 @@ public static void main(String[] args) throws Exception {
//"processedoccurrences_id_e7b77fc2_f1cf_4a46_b7b7_898b663b65dd" OBIS
//"processedoccurrences_id_bd3fdae3_a64e_4215_8eb3_c1bd95981dd2" GBIF
config.setParam("leftTableName", "processedoccurrences_id_e7b77fc2_f1cf_4a46_b7b7_898b663b65dd");
config.setParam("rightTableName", "processedoccurrences_id_bd3fdae3_a64e_4215_8eb3_c1bd95981dd2");
// config.setParam("leftTableName", "processedoccurrences_id_e7b77fc2_f1cf_4a46_b7b7_898b663b65dd");
// config.setParam("rightTableName", "processedoccurrences_id_bd3fdae3_a64e_4215_8eb3_c1bd95981dd2");
config.setParam("finalTableName", "occurrencessubtractedarticle3");

View File

@ -7,6 +7,7 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
@ -19,7 +20,7 @@ public class TestEvaluation {
public static void main(String[] args) throws Exception {
//test Quality
List<Evaluator> evaluators = EvaluatorsFactory.getEvaluators(testQuality());
List<ComputationalAgent> evaluators = EvaluatorsFactory.getEvaluators(testQuality());
evaluate(evaluators.get(0),testQuality());
evaluators = null;
@ -33,7 +34,7 @@ public static void main(String[] args) throws Exception {
}
private static void evaluate(Evaluator evaluator, AlgorithmConfiguration config) throws Exception {
private static void evaluate(ComputationalAgent evaluator, AlgorithmConfiguration config) throws Exception {
if (evaluator != null) {
TestEvaluation tgs = new TestEvaluation();
@ -64,10 +65,10 @@ public static void main(String[] args) throws Exception {
}
public class ThreadCalculator implements Runnable {
Evaluator dg;
ComputationalAgent dg;
AlgorithmConfiguration config;
public ThreadCalculator(Evaluator dg, AlgorithmConfiguration config) {
public ThreadCalculator(ComputationalAgent dg, AlgorithmConfiguration config) {
this.dg = dg;
this.config = config;
}

View File

@ -11,6 +11,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.InterpolateTables.INTERPOLATIONFUNCTIONS;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory;
@ -59,7 +60,7 @@ public static void main(String[] args) throws Exception {
*/
// List<Evaluator> trans = null;
// trans = EvaluatorsFactory.getEvaluators(testConfigLocal12());
List<Clusterer> trans = ClusterersFactory.getClusterers(testConfigLocal2());
List<ComputationalAgent> trans = ClusterersFactory.getClusterers(testConfigLocal2());
trans.get(0).init();
Regressor.process(trans.get(0));

View File

@ -1,91 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.HashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class TestSingleHSPECGeneration {
/**
* example of parallel processing on a single machine the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigLocal());
generators.get(0).init();
generate(generators.get(0));
generators = null;
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
TestSingleHSPECGeneration tgs = new TestSingleHSPECGeneration();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.compute();
} catch (Exception e) {
}
}
}
private static AlgorithmConfiguration testConfigLocal() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(5);
config.setModel("AQUAMAPS_SUITABLE");
config.setParam("DistributionTable","hspec_suitable_baskingshark_aquamaps");
config.setParam("CsquarecodesTable","hcaf_d");
config.setParam("EnvelopeTable","hspen_baskingshark");
config.setParam("PreprocessedTable", "maxminlat_hspen");
config.setParam("CreateTable","true");
/*
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
*/
return config;
}
}

View File

@ -1,84 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
public class TestTransducer {
/**
* example of parallel processing on a single machine the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Transducerer> trans = TransducerersFactory.getTransducerers(testConfigLocal());
trans.get(0).init();
transduce(trans.get(0));
trans = null;
}
private static void transduce(Transducerer trans) throws Exception {
if (trans != null) {
TestTransducer tgs = new TestTransducer();
ThreadCalculator tc = tgs.new ThreadCalculator(trans);
Thread t = new Thread(tc);
t.start();
while (trans.getStatus() < 100) {
System.out.println("STATUS: " + trans.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Transducerer dg;
public ThreadCalculator(Transducerer dg) {
this.dg = dg;
}
public void run() {
try {
dg.compute();
} catch (Exception e) {
}
}
}
private static AlgorithmConfiguration testConfigLocal() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(1);
config.setAgent("BIOCLIMATE_HSPEC");
//config.setParam("HSPEC_TABLE_LIST", "hcaf_d, hcaf_d_2015_LINEAR_01338580273835,hcaf_d_2018_LINEAR_11338580276548,hcaf_d_2021_LINEAR_21338580279237,hcaf_d_2024_LINEAR_31338580282780,hcaf_d_2027_LINEAR_41338580283400,hcaf_d_2030_LINEAR_51338580284030,hcaf_d_2033_LINEAR_61338580284663,hcaf_d_2036_LINEAR_71338580285205,hcaf_d_2039_LINEAR_81338580285958,hcaf_d_2042_LINEAR_91338580286545,hcaf_d_2050");
//config.setParam("HSPEC_TABLE_NAMES", "test,test,test,test,test,test,test,test,test,test,test,test");
config.setParam("HSPEC_TABLE_LIST", "hspec_validation"+AlgorithmConfiguration.getListSeparator()+"hspec_validation2");
config.setParam("HSPEC_TABLE_NAMES", "test"+AlgorithmConfiguration.getListSeparator()+"test");
config.setParam("Threshold", "0.5");
config.setParam("DatabaseUserName", "gcube");
config.setParam("DatabasePassword", "d4science2");
config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
return config;
}
}

View File

@ -1,187 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.InterpolateTables.INTERPOLATIONFUNCTIONS;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class TestTransducers {
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Transducerer> trans = null;
/*
trans = TransducerersFactory.getTransducerers(testConfigLocal());
trans.get(0).init();
Regressor.process(trans.get(0));
trans = null;
trans = TransducerersFactory.getTransducerers(testConfigLocal2());
trans.get(0).init();
Regressor.process(trans.get(0));
trans = null;
trans = TransducerersFactory.getTransducerers(testConfigLocal3());
trans.get(0).init();
Regressor.process(trans.get(0));
trans = null;
trans = TransducerersFactory.getTransducerers(testConfigLocal4());
trans.get(0).init();
Regressor.process(trans.get(0));
trans = null;
*/
/*
trans = TransducerersFactory.getTransducerers(testConfigLocal8());
trans.get(0).init();
Regressor.process(trans.get(0));
trans = null;
trans = TransducerersFactory.getTransducerers(testConfigLocal6());
trans.get(0).init();
Regressor.process(trans.get(0));
trans = null;
*/
trans = TransducerersFactory.getTransducerers(testConfigLocal7());
trans.get(0).init();
Regressor.process(trans.get(0));
trans = null;
}
private static AlgorithmConfiguration testConfigLocal() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("BIOCLIMATE_HSPEC");
config.setParam("HSPEC_TABLE_LIST", "hspec_validation"+AlgorithmConfiguration.getListSeparator()+"hspec_validation2");
config.setParam("HSPEC_TABLE_NAMES", "test"+AlgorithmConfiguration.getListSeparator()+"test");
config.setParam("Threshold", "0.5");
return config;
}
private static AlgorithmConfiguration testConfigLocal2() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("BIOCLIMATE_HCAF");
config.setParam("HCAF_TABLE_LIST","hcaf_d"+AlgorithmConfiguration.getListSeparator()+"hcaf_d_2016_linear_01332632269756"+AlgorithmConfiguration.getListSeparator()+"hcaf_d_2016_linear_01336062995861"+AlgorithmConfiguration.getListSeparator()+"hcaf_d_2050");
config.setParam("HCAF_TABLE_NAMES", "test"+AlgorithmConfiguration.getListSeparator()+"test"+AlgorithmConfiguration.getListSeparator()+"test"+AlgorithmConfiguration.getListSeparator()+"test");
return config;
}
private static AlgorithmConfiguration testConfigLocal3() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("BIOCLIMATE_HSPEN");
config.setParam("HSPEN_TABLE_LIST","hspen"+AlgorithmConfiguration.getListSeparator()+"hspen_2016"+AlgorithmConfiguration.getListSeparator()+"hspen_2020"+AlgorithmConfiguration.getListSeparator()+"hspen_2050");
config.setParam("HSPEN_TABLE_NAMES", "test"+AlgorithmConfiguration.getListSeparator()+"test"+AlgorithmConfiguration.getListSeparator()+"test"+AlgorithmConfiguration.getListSeparator()+"test");
return config;
}
private static AlgorithmConfiguration testConfigLocal4() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("HCAF_INTERPOLATION");
config.setParam("FirstHCAF","hcaf_d");
config.setParam("SecondHCAF","hcaf_d_2050");
config.setParam("YearStart","2012");
config.setParam("YearEnd","2050");
config.setParam("NumberOfInterpolations","2");
config.setParam("InterpolationFunction",INTERPOLATIONFUNCTIONS.LINEAR.name());
return config;
}
private static AlgorithmConfiguration testConfigLocal5() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("OCCURRENCES_MERGER");
config.setParam("longitudeColumn", "decimallongitude");
config.setParam("latitudeColumn", "decimallatitude");
config.setParam("recordedByColumn", "recordedby");
config.setParam("scientificNameColumn", "scientificname");
config.setParam("eventDateColumn", "eventdate");
config.setParam("lastModificationColumn", "modified");
config.setParam("rightTableName", "occurrencetestduplicates2");
config.setParam("leftTableName", "occurrencetestduplicates");
config.setParam("finalTableName", "occurrencesmerged");
config.setParam("spatialTolerance", "0.5");
config.setParam("confidence", "90");
return config;
}
private static AlgorithmConfiguration testConfigLocal6() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("OCCURRENCES_INSEAS_ONEARTH");
config.setParam("longitudeColumn", "decimallongitude");
config.setParam("latitudeColumn", "decimallatitude");
config.setParam("OccurrencePointsTableName", "whitesharkoccurrences2");
config.setParam("finalTableName", "whitesharkoccurrencesfilteredseas");
config.setParam("FilterType", "IN_THE_WATER");
// config.setParam("FilterType", "ON_EARTH");
return config;
}
private static AlgorithmConfiguration testConfigLocal7() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("OCCURRENCES_DUPLICATE_DELETER");
config.setParam("longitudeColumn", "decimallongitude");
config.setParam("latitudeColumn", "decimallatitude");
config.setParam("recordedByColumn", "recordedby");
config.setParam("scientificNameColumn", "scientificname");
config.setParam("eventDateColumn", "eventdate");
config.setParam("lastModificationColumn", "modified");
config.setParam("OccurrencePointsTableName", "occurrence_species_id5397d3c4_a7eb_4227_adb4_d9d7d36e8b21");
config.setParam("finalTableName", "occurrencesnoduplicates");
config.setParam("spatialTolerance", "0.5");
config.setParam("confidence", "80");
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
config.setParam("DatabaseURL","jdbc:postgresql://dbtest.next.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
return config;
}
private static AlgorithmConfiguration testConfigLocal8() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("OCCURRENCES_INTERSECTOR");
config.setParam("longitudeColumn", "decimallongitude");
config.setParam("latitudeColumn", "decimallatitude");
config.setParam("recordedByColumn", "recordedby");
config.setParam("scientificNameColumn", "scientificname");
config.setParam("eventDateColumn", "eventdate");
config.setParam("lastModificationColumn", "modified");
config.setParam("rightTableName", "occurrencetestduplicates2");
config.setParam("leftTableName", "occurrencetestduplicates");
config.setParam("finalTableName", "occurrencesintersected");
config.setParam("spatialTolerance", "0.5");
config.setParam("confidence", "80");
return config;
}
}

View File

@ -1,77 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class TestsDUMMYGeneration {
/**
* example of parallel processing on a single machine the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Generator> generators = GeneratorsFactory.getGenerators(testConfig());
generators.get(0).init();
generate(generators.get(0));
generators = null;
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
TestsDUMMYGeneration tgs = new TestsDUMMYGeneration();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.compute();
} catch (Exception e) {
}
}
}
private static AlgorithmConfiguration testConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(10);
config.setModel("DUMMY");
return config;
}
}

View File

@ -4,6 +4,7 @@ import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
@ -16,13 +17,13 @@ public class TestsHSPENTraining {
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Modeler> modelers=ModelersFactory.getModelers(testConfig());
List<ComputationalAgent> modelers=ModelersFactory.getModelers(testConfig());
train(modelers.get(0),testConfig());
modelers = null;
}
private static void train(Modeler modeler,AlgorithmConfiguration config) throws Exception {
private static void train(ComputationalAgent modeler,AlgorithmConfiguration config) throws Exception {
if (modeler != null) {
TestsHSPENTraining tgs = new TestsHSPENTraining();
@ -45,10 +46,10 @@ public static void main(String[] args) throws Exception {
}
public class ThreadCalculator implements Runnable {
Modeler dg;
ComputationalAgent dg;
AlgorithmConfiguration config;
public ThreadCalculator(Modeler dg,AlgorithmConfiguration config) {
public ThreadCalculator(ComputationalAgent dg,AlgorithmConfiguration config) {
this.dg = dg;
this.config = config;
}
@ -56,7 +57,7 @@ public static void main(String[] args) throws Exception {
public void run() {
try {
dg.model(null);
dg.compute();
} catch (Exception e) {
}
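In the training test the modeler-specific `dg.model(null)` call is replaced by the generic `dg.compute()`, so training starts through the same lifecycle as every other agent. A compact sketch of the driver after the change; the model name is a placeholder for whatever `TestsHSPENTraining.testConfig()` actually sets:

```java
// Compact sketch of the training driver after the change; "HSPEN" and the
// config values are placeholders, real configurations also carry the training
// table parameters set in testConfig().
import java.util.List;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;

public class HspenTrainingSketch {

    public static void main(String[] args) throws Exception {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setConfigPath("./cfg/");
        config.setPersistencePath("./");
        config.setModel("HSPEN"); // placeholder model name

        List<ComputationalAgent> modelers = ModelersFactory.getModelers(config);
        ComputationalAgent modeler = modelers.get(0);
        modeler.init();
        modeler.compute(); // was modeler.model(null) before this commit
    }
}
```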

View File

@ -1,77 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class TestsTESTGeneration {
/**
* example of parallel processing on a single machine the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Generator> generators = GeneratorsFactory.getGenerators(testConfig());
generators.get(0).init();
generate(generators.get(0));
generators = null;
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
TestsTESTGeneration tgs = new TestsTESTGeneration();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.compute();
} catch (Exception e) {
}
}
}
private static AlgorithmConfiguration testConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(2);
config.setModel("TEST");
return config;
}
}

View File

@ -6,6 +6,7 @@ import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
@ -50,7 +51,7 @@ public class ExperimentsForLatimeria {
public static void generateHSPENTable() throws Exception{
System.out.println("*****************************HSPEN FILTER**********************************");
List<Transducerer> trans = null;
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(PresetConfigGenerator.configHSPENfilter(envelopeTable, speciesID));
trans.get(0).init();
Regressor.process(trans.get(0));
@ -58,7 +59,7 @@ public class ExperimentsForLatimeria {
}
public static void generateAquamapsNativeSuitable() throws Exception{
List<Generator> generators = null;
List<ComputationalAgent> generators = null;
System.out.println("*****************************AQUAMAPS SUITABLE**********************************");
generators = GeneratorsFactory.getGenerators(PresetConfigGenerator.configAquamapsSuitable(aquamapsSuitableTable,envelopeTable));
generators.get(0).init();
@ -75,7 +76,7 @@ public class ExperimentsForLatimeria {
public static void generatePresenceTable() throws Exception{
System.out.println("*****************************PRESENCE TABLE**********************************");
List<Transducerer> trans = null;
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(PresetConfigGenerator.configPresenceTable(presenceTable, -1, speciesID));
trans.get(0).init();
Regressor.process(trans.get(0));
@ -85,7 +86,7 @@ public class ExperimentsForLatimeria {
public static void generateRandomAbsenceTable() throws Exception{
System.out.println("*****************************RANDOM ABSENCES**********************************");
List<Transducerer> trans = null;
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(PresetConfigGenerator.configAbsenceTable(true, absenceRandomTable, aquamapsNativeTable, numberOfPoints, speciesID));
trans.get(0).init();
Regressor.process(trans.get(0));
@ -94,7 +95,7 @@ public class ExperimentsForLatimeria {
public static void generateAbsenceTable() throws Exception{
System.out.println("*****************************ABSENCES**********************************");
List<Transducerer> trans = null;
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(PresetConfigGenerator.configAbsenceTable(false, absenceStaticTable, aquamapsNativeTable, numberOfPoints, speciesID));
trans.get(0).init();
Regressor.process(trans.get(0));
@ -103,7 +104,7 @@ public class ExperimentsForLatimeria {
public static void trainNeuralNetworks() throws Exception{
List<Modeler> modelers = null;
List<ComputationalAgent> modelers = null;
System.out.println("*****************************TRAINING NN SUITABLE WITH STATIC ABSENCES**********************************");
modelers = ModelersFactory.getModelers(PresetConfigGenerator.configSuitableNeuralNetworkTraining(presenceTable,absenceStaticTable,staticsuitable,speciesID,"100"+AlgorithmConfiguration.getListSeparator()+"2",nnname));
modelers.get(0).init();
@ -133,7 +134,7 @@ public class ExperimentsForLatimeria {
public static void trainSuitableNeuralNetworks() throws Exception{
List<Modeler> modelers = null;
List<ComputationalAgent> modelers = null;
System.out.println("*****************************TRAINING NN SUITABLE WITH STATIC ABSENCES**********************************");
modelers = ModelersFactory.getModelers(PresetConfigGenerator.configSuitableNeuralNetworkTraining(presenceTable,absenceStaticTable,staticsuitable,speciesID,"100"+AlgorithmConfiguration.getListSeparator()+"2",nnname));
modelers.get(0).init();
@ -144,7 +145,7 @@ public class ExperimentsForLatimeria {
public static void trainNativeNeuralNetworks() throws Exception{
System.out.println("*****************************TRAINING NN NATIVE WITH STATIC ABSENCES**********************************");
List<Modeler> modelers = null;
List<ComputationalAgent> modelers = null;
modelers = ModelersFactory.getModelers(PresetConfigGenerator.configNativeNeuralNetworkTraining(presenceTable,absenceStaticTable,staticnative,speciesID,"100"+AlgorithmConfiguration.getListSeparator()+"2",nnname));
modelers.get(0).init();
Regressor.process(modelers.get(0));
@ -154,7 +155,7 @@ public class ExperimentsForLatimeria {
public static void generateAquamapsNativeSuitableNeuralNetwokrs() throws Exception{
List<Generator> generators = null;
List<ComputationalAgent> generators = null;
System.out.println("*****************************GENERATING NN SUITABLE WITH STATIC ABSENCES**********************************");
generators = GeneratorsFactory.getGenerators(PresetConfigGenerator.configAquamapsNNSuitable(nnsuitableTable,staticsuitable,envelopeTable,speciesID,nnname));
generators.get(0).init();
@ -185,7 +186,7 @@ public class ExperimentsForLatimeria {
public static void calcdiscrepancy(String table1,String table2) throws Exception{
System.out.println("*****************************DISCREPANCY: "+table1+" vs "+table2+"************************************");
List<Evaluator> evaluators = null;
List<ComputationalAgent> evaluators = null;
evaluators = EvaluatorsFactory.getEvaluators(PresetConfigGenerator.configDiscrepancyAnalysis(table1, table2));
evaluators.get(0).init();
Regressor.process(evaluators.get(0));
@ -197,7 +198,7 @@ public class ExperimentsForLatimeria {
public static void calcquality(String table,String presenceTable, String absenceTable) throws Exception{
System.out.println("*****************************QUALITY: "+table+" vs "+presenceTable+" and "+absenceTable+"************************************");
List<Evaluator> evaluators = null;
List<ComputationalAgent> evaluators = null;
evaluators = EvaluatorsFactory.getEvaluators(PresetConfigGenerator.configQualityAnalysis(presenceTable,absenceTable,table));
evaluators.get(0).init();
Regressor.process(evaluators.get(0));
@ -210,7 +211,7 @@ public class ExperimentsForLatimeria {
public static void generateHCAFFilter() throws Exception{
System.out.println("*****************************HCAF FILTER ON : "+x1+","+y1+","+x2+","+y2+"************************************");
List<Transducerer> trans = null;
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(PresetConfigGenerator.configHCAFfilter(filteredhcaf, x1, y1, x2, y2));
trans.get(0).init();
Regressor.process(trans.get(0));
@ -219,7 +220,7 @@ public class ExperimentsForLatimeria {
public static void calcHRS(String hcaf,String absenceTable,String presenceTable) throws Exception{
System.out.println("*****************************HRS: "+absenceTable+","+presenceTable+" vs "+hcaf+"************************************");
List<Evaluator> evaluators = null;
List<ComputationalAgent> evaluators = null;
evaluators = EvaluatorsFactory.getEvaluators(PresetConfigGenerator.configHRSAnalysis(filteredhcaf, absenceTable, presenceTable));
evaluators.get(0).init();
Regressor.process(evaluators.get(0));

View File

@ -6,6 +6,7 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory;
public class RegressionTestClusterers {
@ -17,7 +18,7 @@ public class RegressionTestClusterers {
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Clusterer> clus;
List<ComputationalAgent> clus;
clus = ClusterersFactory.getClusterers(testConfigLocal());

View File

@ -3,6 +3,7 @@ package org.gcube.dataanalysis.ecoengine.test.regression;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
@ -14,7 +15,7 @@ public class RegressionTestEvaluators {
public static void main(String[] args) throws Exception {
List<Evaluator> evaluators = EvaluatorsFactory.getEvaluators(testConfig1());
List<ComputationalAgent> evaluators = EvaluatorsFactory.getEvaluators(testConfig1());
evaluators.get(0).init();
Regressor.process(evaluators.get(0));
evaluators = null;

View File

@ -3,6 +3,7 @@ package org.gcube.dataanalysis.ecoengine.test.regression;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
@ -16,7 +17,7 @@ public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigLocal());
List<ComputationalAgent> generators = GeneratorsFactory.getGenerators(testConfigLocal());
generators.get(0).init();
Regressor.process(generators.get(0));
generators = null;

View File

@ -3,6 +3,7 @@ package org.gcube.dataanalysis.ecoengine.test.regression;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
@ -18,7 +19,7 @@ public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Modeler> modelers = ModelersFactory.getModelers(testConfigLocal());
List<ComputationalAgent> modelers = ModelersFactory.getModelers(testConfigLocal());
modelers.get(0).init();
Regressor.process(modelers.get(0));
modelers = null;

View File

@ -4,6 +4,7 @@ import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.InterpolateTables.INTERPOLATIONFUNCTIONS;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
@ -12,7 +13,7 @@ public class RegressionTestTransducers {
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Transducerer> trans = null;
List<ComputationalAgent> trans = null;
/*
trans = TransducerersFactory.getTransducerers(testConfigLocal());
trans.get(0).init();

View File

@ -1,5 +1,6 @@
package org.gcube.dataanalysis.ecoengine.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
@ -51,13 +52,14 @@ public class Regressor {
public static AlgorithmConfiguration getConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://146.48.87.169/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
AnalysisLogger.setLogger(config.getConfigPath()+AlgorithmConfiguration.defaultLoggerFile);
return config;
}
}
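With the persistence path and the logger now set inside `Regressor.getConfig()`, a regression test only adds the agent name and its parameters before handing the agent to `Regressor.process`. A sketch of that flow; the algorithm name and parameters are the ones used by the bioclimate transducer test configuration in this commit:

```java
// Sketch of the regression-test flow after the Regressor.getConfig() change;
// agent name and parameters are copied from testConfigLocal() above.
import java.util.List;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;

public class RegressorUsageSketch {

    public static void main(String[] args) throws Exception {
        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("BIOCLIMATE_HSPEC");
        config.setParam("HSPEC_TABLE_LIST",
                "hspec_validation" + AlgorithmConfiguration.getListSeparator() + "hspec_validation2");
        config.setParam("HSPEC_TABLE_NAMES",
                "test" + AlgorithmConfiguration.getListSeparator() + "test");
        config.setParam("Threshold", "0.5");

        List<ComputationalAgent> trans = TransducerersFactory.getTransducerers(config);
        trans.get(0).init();
        Regressor.process(trans.get(0));
    }
}
```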

View File

@ -32,18 +32,18 @@ public class OccurrencePointsMerger implements Transducerer {
static protected String finalTableNameL = "final_Table_Name";
static protected String longitudeColumn = "longitudeColumn";
static protected String latitudeColumn = "latitudeColumn";
static protected String recordedByColumn = "recordedByColumn";
static protected String scientificNameColumn = "scientificNameColumn";
static protected String eventDateColumn = "eventDateColumn";
static protected String lastModificationColumn = "lastModificationColumn";
static protected String rightTableNameF = "rightTableName";
static protected String leftTableNameF = "leftTableName";
static protected String finalTableNameF = "finalTableName";
static protected String spatialTolerance = "spatialTolerance";
static protected String confidence = "confidence";
static protected String sqlDateFormat = "MM/DD/YYYY HH24:MI:SS";
static protected String javaDateFormat = "MM/dd/yyyy HH:mm:ss";
static protected String latitudeColumn = "latitudeColumn";
static protected String recordedByColumn = "recordedByColumn";
static protected String scientificNameColumn = "scientificNameColumn";
static protected String eventDateColumn = "eventDateColumn";
static protected String lastModificationColumn = "lastModificationColumn";
static protected String rightTableNameF = "rightTableName";
static protected String leftTableNameF = "leftTableName";
static protected String finalTableNameF = "finalTableName";
static protected String spatialTolerance = "spatialTolerance";
static protected String confidence = "confidence";
static protected String sqlDateFormat = "MM/DD/YYYY HH24:MI:SS";
static protected String javaDateFormat = "MM/dd/yyyy HH:mm:ss";
protected List<OccurrenceRecord> records_left;
protected List<OccurrenceRecord> records_right;
@ -281,7 +281,7 @@ public class OccurrencePointsMerger implements Transducerer {
@Override
public void init() throws Exception {
// AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
lonFld = config.getParam(longitudeColumn);
latFld = config.getParam(latitudeColumn);
recordedByFld = config.getParam(recordedByColumn);
@ -478,19 +478,21 @@ public class OccurrencePointsMerger implements Transducerer {
public List<Object> leftRows;
public List<Object> rightRows;
public int getNumLeftObjects(){
if (leftRows!=null)
public int getNumLeftObjects() {
if (leftRows != null)
return leftRows.size();
else return 0;
else
return 0;
}
public int getNumRightObjects(){
if (rightRows!=null)
public int getNumRightObjects() {
if (rightRows != null)
return rightRows.size();
else return 0;
else
return 0;
}
public void takeFullRanges() {
// take the elements from sx table
AnalysisLogger.getLogger().info("Taking elements from left table: " + leftTableName);
@ -512,60 +514,67 @@ public class OccurrencePointsMerger implements Transducerer {
public void computeRange() throws Exception {
try {
AnalysisLogger.getLogger().info("Processing " + leftTableName + " vs " + rightTableName);
// AnalysisLogger.getLogger().info("ELEMENTS " + getNumLeftObjects() + " vs " + getNumRightObjects());
status = 10;
int rightCounter = 0;
int similaritiesCounter = 0;
int allrightrows = rightRows.size();
for (Object rRow : rightRows) {
// transform into an occurrence object
OccurrenceRecord rightOcc = row2OccurrenceRecord((Object[]) rRow);
// for each element in sx
int k = 0;
boolean found = false;
float p = 0;
OccurrenceRecord bestleftOcc = null;
for (Object lRow : leftRows) {
OccurrenceRecord leftOcc = null;
leftOcc = row2OccurrenceRecord((Object[]) lRow);
p = extProb(leftOcc, rightOcc);
if ((allrightrows > 0) && (getNumLeftObjects() > 0)) {
for (Object rRow : rightRows) {
// AnalysisLogger.getLogger().info("RR CONV");
// transform into an occurrence object
OccurrenceRecord rightOcc = row2OccurrenceRecord((Object[]) rRow);
// AnalysisLogger.getLogger().info("RR CONV - OK");
// for each element in sx
int k = 0;
boolean found = false;
float p = 0;
OccurrenceRecord bestleftOcc = null;
for (Object lRow : leftRows) {
OccurrenceRecord leftOcc = null;
// AnalysisLogger.getLogger().info("LL CONV");
leftOcc = row2OccurrenceRecord((Object[]) lRow);
p = extProb(leftOcc, rightOcc);
// AnalysisLogger.getLogger().info("P");
if (p >= confidenceValue) {
bestleftOcc = leftOcc;
found = true;
similaritiesCounter++;
AnalysisLogger.getLogger().info("Found a similarity with P=" + p + " between (" + "\"" + leftOcc.scientificName + "\"" + ",\"" + leftOcc.x + "\"" + "," + "\"" + leftOcc.y + "\"" + "," + "\"" + leftOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(leftOcc.eventdate) + "\"" + ") VS " + "(" + "\"" + rightOcc.scientificName + "\"" + "," + "\"" + rightOcc.x + "\"" + "," + "\"" + rightOcc.y + "\"" + "," + "\"" + rightOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(rightOcc.eventdate) + "\"" + ")");
// break;
if (!firstbest)
manageHighProbability(p, bestleftOcc, rightOcc);
else
break;
} else if (!firstbest)
manageLowProbability(p, bestleftOcc, rightOcc);
k++;
}
rightCounter++;
if (p >= confidenceValue) {
bestleftOcc = leftOcc;
found = true;
similaritiesCounter++;
AnalysisLogger.getLogger().info("Found a similarity with P=" + p + " between (" + "\"" + leftOcc.scientificName + "\"" + ",\"" + leftOcc.x + "\"" + "," + "\"" + leftOcc.y + "\"" + "," + "\"" + leftOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(leftOcc.eventdate) + "\"" + ") VS " + "(" + "\"" + rightOcc.scientificName + "\"" + "," + "\"" + rightOcc.x + "\"" + "," + "\"" + rightOcc.y + "\"" + "," + "\"" + rightOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(rightOcc.eventdate) + "\"" + ")");
// break;
if (!firstbest)
if (firstbest) {
if (found)
manageHighProbability(p, bestleftOcc, rightOcc);
else
break;
} else if (!firstbest)
manageLowProbability(p, bestleftOcc, rightOcc);
k++;
}
rightCounter++;
manageLowProbability(p, bestleftOcc, rightOcc);
}
if (firstbest) {
if (found)
manageHighProbability(p, bestleftOcc, rightOcc);
else
manageLowProbability(p, bestleftOcc, rightOcc);
}
status = Math.min(90, 10f + (80 * ((float) rightCounter) / ((float) allrightrows)));
if (rightCounter % 500 == 0) {
AnalysisLogger.getLogger().info("Persisting ... " + rightCounter + " over " + allrightrows);
persist();
}
}
}
AnalysisLogger.getLogger().info("Found " + similaritiesCounter + " similarities on " + rightCounter + " elements");
status = 90;
// transform the complete list into a table
persist();
// close DB connection
} catch (Exception e) {
System.err.println("Error in computation");
AnalysisLogger.getLogger().info(e);
throw e;
} finally {
shutdown();
@@ -577,101 +586,10 @@ public class OccurrencePointsMerger implements Transducerer {
@Override
public void compute() throws Exception {
try {
// init DB connection
AnalysisLogger.getLogger().info("Initializing DB Connection");
dbconnection = DatabaseUtils.initDBSession(config);
AnalysisLogger.getLogger().info("Taking Table Description");
extractColumnNames();
initDB(true);
takeFullRanges();
computeRange();
AnalysisLogger.getLogger().info("Taken Table Description: " + columns);
AnalysisLogger.getLogger().info("Creating final table: " + finalTableName);
// create new merged table
try {
DatabaseFactory.executeSQLUpdate(DatabaseUtils.dropTableStatement(finalTableName), dbconnection);
} catch (Exception e1) {
}
prepareFinalTable();
// take the elements from sx table
AnalysisLogger.getLogger().info("Taking elements from left table: " + leftTableName);
List<Object> leftRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getColumnsElementsStatement(leftTableName, columns.toString(), ""), dbconnection);
// take the elements from dx table
AnalysisLogger.getLogger().info("Taking elements from right table: " + rightTableName);
List<Object> rightRows = DatabaseFactory.executeSQLQuery(DatabaseUtils.getColumnsElementsStatement(rightTableName, columns.toString(), ""), dbconnection);
// for each element in dx
List<OccurrenceRecord> leftRecords = new ArrayList<OccurrencePointsMerger.OccurrenceRecord>();
AnalysisLogger.getLogger().info("Processing " + leftTableName + " vs " + rightTableName);
status = 10;
int rightCounter = 0;
int similaritiesCounter = 0;
int allrightrows = rightRows.size();
for (Object rRow : rightRows) {
// transform into an occurrence object
OccurrenceRecord rightOcc = row2OccurrenceRecord((Object[]) rRow);
// for each element in sx
int k = 0;
int leftrecordsSize = 0;
boolean found = false;
float p = 0;
OccurrenceRecord bestleftOcc = null;
for (Object lRow : leftRows) {
OccurrenceRecord leftOcc = null;
// only for the first iteration on the left occurrences perform the transformation
/*
* if (leftrecordsSize <= k) { // transform into an occurrence object leftOcc = row2OccurrenceRecord((Object[]) lRow); leftRecords.add(leftOcc); leftrecordsSize++; // System.out.println("ADDED "+k+"-th elements size: "+leftRecords.size()); } else leftOcc = leftRecords.get(k);
*/
leftOcc = row2OccurrenceRecord((Object[]) lRow);
// evaluate P(dx,sx)
p = extProb(leftOcc, rightOcc);
if (p >= confidenceValue) {
bestleftOcc = leftOcc;
found = true;
similaritiesCounter++;
AnalysisLogger.getLogger().info("Found a similarity with P=" + p + " between (" + "\"" + leftOcc.scientificName + "\"" + ",\"" + leftOcc.x + "\"" + "," + "\"" + leftOcc.y + "\"" + "," + "\"" + leftOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(leftOcc.eventdate) + "\"" + ") VS " + "(" + "\"" + rightOcc.scientificName + "\"" + "," + "\"" + rightOcc.x + "\"" + "," + "\"" + rightOcc.y + "\"" + "," + "\"" + rightOcc.recordedby + "\"" + "," + "\"" + convert2conventionalFormat(rightOcc.eventdate) + "\"" + ")");
// break;
if (!firstbest)
manageHighProbability(p, bestleftOcc, rightOcc);
else
break;
} else if (!firstbest)
manageLowProbability(p, bestleftOcc, rightOcc);
k++;
}
rightCounter++;
if (firstbest) {
if (found)
manageHighProbability(p, bestleftOcc, rightOcc);
else
manageLowProbability(p, bestleftOcc, rightOcc);
}
status = Math.min(90, 10f + (80 * ((float) rightCounter) / ((float) allrightrows)));
if (rightCounter % 500 == 0) {
AnalysisLogger.getLogger().info("Persisting ... " + rightCounter + " over " + allrightrows);
persist();
}
}
AnalysisLogger.getLogger().info("Found " + similaritiesCounter + " similarities on " + rightCounter + " elements");
status = 90;
// transform the complete list into a table
persist();
// close DB connection
} catch (Exception e) {
throw e;
} finally {
if (dbconnection != null)
try {
dbconnection.close();
} catch (Exception e) {
}
status = 100;
AnalysisLogger.getLogger().info("Occ Points Processing Finished and db closed");
}
}
public static void main(String[] args) throws Exception {
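
As context for the restructured computeRange() above: each right-table occurrence is compared against every left-table occurrence, pairs whose similarity exceeds the confidence threshold are counted, and when firstbest is set the scan of the left table stops at the first acceptable match; the new code also skips the whole comparison when either table is empty. Below is a minimal, self-contained sketch of that pattern only — Record and similarity() are hypothetical stand-ins for OccurrenceRecord and extProb(), not the project's classes.

// Hedged sketch: Record and similarity() stand in for OccurrenceRecord and
// extProb(leftOcc, rightOcc); the thresholds and data are illustrative.
import java.util.Arrays;
import java.util.List;

class OccurrenceMatchSketch {

    static class Record {
        final String scientificName;
        Record(String name) { this.scientificName = name; }
    }

    // stand-in for extProb(leftOcc, rightOcc)
    static float similarity(Record left, Record right) {
        return left.scientificName.equalsIgnoreCase(right.scientificName) ? 1f : 0f;
    }

    static int countSimilarities(List<Record> left, List<Record> right,
                                 float confidence, boolean firstBest) {
        int similarities = 0;
        // guard introduced by the new code: nothing to compare if either side is empty
        if (left.isEmpty() || right.isEmpty())
            return 0;
        for (Record r : right) {
            for (Record l : left) {
                float p = similarity(l, r);
                if (p >= confidence) {
                    similarities++;
                    if (firstBest)   // keep only the first match above the threshold
                        break;
                }
            }
        }
        return similarities;
    }

    public static void main(String[] args) {
        List<Record> left = Arrays.asList(new Record("Gadus morhua"), new Record("Thunnus thynnus"));
        List<Record> right = Arrays.asList(new Record("gadus morhua"));
        System.out.println(countSimilarities(left, right, 0.8f, true)); // prints 1
    }
}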

View File

@@ -6,12 +6,13 @@ import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
public class EvaluatorT implements Runnable{
Evaluator dg;
ComputationalAgent dg;
AlgorithmConfiguration config;
@@ -26,15 +27,15 @@ public class EvaluatorT implements Runnable{
}
}
public EvaluatorT(Evaluator dg, AlgorithmConfiguration config) {
public EvaluatorT(ComputationalAgent dg, AlgorithmConfiguration config) {
this.dg = dg;
this.config = config;
}
public static void evaluate(AlgorithmConfiguration config) throws Exception {
List<Evaluator> evaluators = EvaluatorsFactory.getEvaluators(config);
Evaluator evaluator = evaluators.get(0);
List<ComputationalAgent> evaluators = EvaluatorsFactory.getEvaluators(config);
ComputationalAgent evaluator = evaluators.get(0);
if (evaluator != null) {
EvaluatorT tc = new EvaluatorT(evaluator,config);

View File

@@ -3,13 +3,14 @@ package org.gcube.dataanalysis.ecoengine.user;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class GeneratorT implements Runnable{
Generator dg;
ComputationalAgent dg;
public GeneratorT(Generator dg) {
public GeneratorT(ComputationalAgent dg) {
this.dg = dg;
}
@@ -25,8 +26,8 @@ public class GeneratorT implements Runnable{
public static void generate(AlgorithmConfiguration config) throws Exception {
List<Generator> generators = GeneratorsFactory.getGenerators(config);
Generator generator = generators.get(0);
List<ComputationalAgent> generators = GeneratorsFactory.getGenerators(config);
ComputationalAgent generator = generators.get(0);
if (generator != null) {
generator.init();
@@ -37,10 +38,10 @@ public class GeneratorT implements Runnable{
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
// String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
// System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
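
The change above makes GeneratorT drive its agent through the ComputationalAgent supertype rather than Generator. A minimal sketch of that pattern follows, assuming only the calls this commit itself shows (GeneratorsFactory.getGenerators(), init(), compute(), getStatus(), getResourceLoad(), getResources()); the wrapper class and its poll-while-the-worker-is-alive loop are illustrative, not the project's implementation.

// Hedged sketch of driving a ComputationalAgent returned by a factory.
// Package names are the project's; this class itself is illustrative only.
import java.util.List;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;

class AgentPollingSketch {

    static void runAndMonitor(AlgorithmConfiguration config) throws Exception {
        List<ComputationalAgent> generators = GeneratorsFactory.getGenerators(config);
        final ComputationalAgent agent = generators.get(0);
        agent.init();

        // run the computation in a worker thread so the caller can poll progress
        Thread worker = new Thread(new Runnable() {
            public void run() {
                try {
                    agent.compute();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
        worker.start();

        // poll the agent's self-reported load and status until the worker finishes
        while (worker.isAlive()) {
            System.out.println("LOAD: " + agent.getResourceLoad());
            System.out.println("RESOURCES: " + agent.getResources());
            System.out.println("STATUS: " + agent.getStatus());
            Thread.sleep(1000);
        }
        worker.join();
    }
}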

View File

@@ -3,15 +3,16 @@ package org.gcube.dataanalysis.ecoengine.user;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
public class ModelerT implements Runnable {
Modeler dg;
ComputationalAgent dg;
AlgorithmConfiguration config;
public ModelerT(Modeler dg, AlgorithmConfiguration config) {
public ModelerT(ComputationalAgent dg, AlgorithmConfiguration config) {
this.dg = dg;
this.config = config;
}
@@ -19,7 +20,7 @@ public class ModelerT implements Runnable {
public void run() {
try {
dg.model(null);
dg.compute();
} catch (Exception e) {
}
@@ -27,8 +28,8 @@ public class ModelerT implements Runnable {
public static void train(AlgorithmConfiguration config) throws Exception {
List<Modeler> modelers = ModelersFactory.getModelers(config);
Modeler modeler = modelers.get(0);
List<ComputationalAgent> modelers = ModelersFactory.getModelers(config);
ComputationalAgent modeler = modelers.get(0);
if (modeler != null) {
ModelerT tc = new ModelerT(modeler, config);