diff --git a/cfg/evaluators.properties b/cfg/evaluators.properties index 3269cae..1245533 100644 --- a/cfg/evaluators.properties +++ b/cfg/evaluators.properties @@ -1,2 +1,3 @@ DISCREPANCY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis -QUALITY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DistributionQualityAnalysis \ No newline at end of file +QUALITY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DistributionQualityAnalysis +HRS=org.gcube.dataanalysis.ecoengine.evaluation.HabitatRepresentativeness \ No newline at end of file diff --git a/cfg/transducerers.properties b/cfg/transducerers.properties index f656929..dfb185d 100644 --- a/cfg/transducerers.properties +++ b/cfg/transducerers.properties @@ -1 +1,2 @@ TESTTRANS=org.gcube.dataanalysis.ecoengine.transducers.TestTrans +BIOCLIMATE_HSPEC=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHSPECTransducer diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/clustering/DBScan.java b/src/main/java/org/gcube/dataanalysis/ecoengine/clustering/DBScan.java index 5ff3a02..b4858bb 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/clustering/DBScan.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/clustering/DBScan.java @@ -1,11 +1,9 @@ package org.gcube.dataanalysis.ecoengine.clustering; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; @@ -21,9 +19,9 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; +import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory; import org.gcube.dataanalysis.ecoengine.utils.Transformations; import org.hibernate.SessionFactory; -import org.postgresql.core.Utils; import com.rapidminer.example.Attribute; import com.rapidminer.example.Attributes; @@ -91,7 +89,7 @@ public class DBScan implements Clusterer{ config.setConfigPath("./cfg/"); config.initRapidMiner(); dbscanner.setConfiguration(config); - dbscanner.cluster(); + dbscanner.compute(); } @@ -125,7 +123,7 @@ public class DBScan implements Clusterer{ config.initRapidMiner(); long t0 = System.currentTimeMillis(); dbscanner.setConfiguration(config); - dbscanner.cluster(); + dbscanner.compute(); System.out.println("ELAPSED "+(System.currentTimeMillis()-t0)); } @@ -150,7 +148,7 @@ public class DBScan implements Clusterer{ DBScan dbscanner = new DBScan(); dbscanner.setConfiguration(config); dbscanner.init(); - dbscanner.cluster(); + dbscanner.compute(); System.out.println("ELAPSED "+(System.currentTimeMillis()-t0)); @@ -258,7 +256,7 @@ public class DBScan implements Clusterer{ @Override - public void cluster() throws Exception { + public void compute() throws Exception { if ((config==null)||epsilon==null||minPoints==null||points==null){ throw new Exception("DBScan: Error incomplete parameters"); @@ -414,6 +412,21 @@ public class DBScan implements Clusterer{ return "Clustering with DBScan"; } + + ResourceFactory resourceManager; + public String getResourceLoad() { + if (resourceManager==null) + resourceManager = new 
ResourceFactory(); + return resourceManager.getResourceLoad(1); + } + + + @Override + public String getResources() { + return ResourceFactory.getResources(100f); + } + + } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/configuration/AlgorithmConfiguration.java b/src/main/java/org/gcube/dataanalysis/ecoengine/configuration/AlgorithmConfiguration.java index aa8520d..76ff549 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/configuration/AlgorithmConfiguration.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/configuration/AlgorithmConfiguration.java @@ -48,40 +48,27 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement public static String RapidMinerOperatorsFile = "operators.xml"; public static String StatisticalManagerService = "StatisticalManager"; public static String StatisticalManagerClass = "Services"; + public static String listSeparator="#"; - + public static String getListSeparator() { + return listSeparator; + } + + public static void setListSeparator(String listSeparator) { + AlgorithmConfiguration.listSeparator = listSeparator; + } + public static int chunkSize = 100000; public static int refreshResourcesTime = 10; - // database parameters - private String databaseDriver = "org.postgresql.Driver"; - private String databaseURL = null; - private String databaseUserName = null; - private String databasePassword = null; - private String databaseDialect = null; - private String databaseIdleConnectionTestPeriod = null; - private String databaseAutomaticTestTable = null; - // Algorithm Parameters private String configPath; - private String cachePath; private String persistencePath; - private String distributionTable; private String tableSpace; - private Boolean createTable = false; - private Boolean useDB = true; - private String envelopeTable; - private String csquarecodesTable; - private String occurrenceCellsTable; - private List featuresTable; - private List preprocessedTables; private List endpoints; //service and remote - private String remoteCalculatorEndpoint; - private String serviceUserName; - private String remoteEnvironment; private Integer numberOfResources = 0; //modeling diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/ResourceLoad.java b/src/main/java/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/ResourceLoad.java index a0d3464..e553ffe 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/ResourceLoad.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/ResourceLoad.java @@ -13,5 +13,8 @@ public class ResourceLoad { return "["+timestamp+", "+value+"]"; } - + public static String defaultResourceLoad(){ + long tk = System.currentTimeMillis(); + return new ResourceLoad(tk, 1).toString(); + } } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/Resources.java b/src/main/java/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/Resources.java index 85d2830..596491a 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/Resources.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/Resources.java @@ -3,6 +3,8 @@ package org.gcube.dataanalysis.ecoengine.connectors.livemonitor; import java.util.ArrayList; import java.util.List; +import org.gcube.contentmanagement.graphtools.utils.HttpRequest; + public class Resources { public List list; @@ -17,4 +19,24 @@ public class Resources { } + + public static String 
buildLocalResourcesLog(int nres){ + + Resources res = new Resources(); + try { + for (int i = 0; i < nres; i++) { + try { + double value = 100.00; + res.addResource("Thread_" + (i + 1), value); + } catch (Exception e1) { + } + } + } catch (Exception e) { + e.printStackTrace(); + } + if ((res != null) && (res.list != null)) + return HttpRequest.toJSon(res.list).replace("resId", "resID"); + else + return ""; + } } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/InputTablesList.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/InputTablesList.java new file mode 100644 index 0000000..c5b2974 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/InputTablesList.java @@ -0,0 +1,31 @@ +package org.gcube.dataanalysis.ecoengine.datatypes; + +import java.util.ArrayList; +import java.util.List; + +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; + +public class InputTablesList extends StatisticalType { + + protected List list; + protected List templateNames; + + public InputTablesList(List templateNames, String name, String description, boolean optional) { + super(name, description, optional); + list = new ArrayList(); + this.templateNames=templateNames; + } + + public void add(InputTable st){ + list.add(st); + } + + public List getList(){ + return list; + } + + public List getTemplates(){ + return templateNames; + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/PrimitiveTypesList.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/PrimitiveTypesList.java new file mode 100644 index 0000000..84c94f7 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/PrimitiveTypesList.java @@ -0,0 +1,33 @@ +package org.gcube.dataanalysis.ecoengine.datatypes; + +import java.util.ArrayList; +import java.util.List; + +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; +import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; + +public class PrimitiveTypesList extends StatisticalType { + + protected List list; + PrimitiveTypes type; + + + public PrimitiveTypesList(PrimitiveTypes type, String name, String description, boolean optional) { + super(name, description, optional); + list = new ArrayList(); + this.type = type; + } + + public void add(PrimitiveType st){ + list.add(st); + } + + public List getList(){ + return list; + } + + public PrimitiveTypes getTemplates(){ + return type; + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/StatisticalType.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/StatisticalType.java index 4e11471..77d749a 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/StatisticalType.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/StatisticalType.java @@ -30,6 +30,13 @@ public class StatisticalType { } + public StatisticalType(String name, String description, boolean optional) { + this.name=name; + this.description=description; + this.defaultValue=""; + this.optional=optional; + } + public String getDefaultValue() { return defaultValue; } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/StatisticalTypeList.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/StatisticalTypeList.java new file mode 100644 index 0000000..4ca4dd2 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/StatisticalTypeList.java @@ -0,0 +1,24 @@ +package org.gcube.dataanalysis.ecoengine.datatypes; + 
+import java.util.ArrayList; +import java.util.List; + +public class StatisticalTypeList extends StatisticalType { + + private List list; + + + public StatisticalTypeList(String name, String description, boolean optional) { + super(name, description, optional); + list = new ArrayList(); + } + + public void add(C st){ + list.add(st); + } + + public List getList(){ + return list; + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/PrimitiveTypes.java b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/PrimitiveTypes.java index 4c9cbca..1c84922 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/PrimitiveTypes.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/datatypes/enumtypes/PrimitiveTypes.java @@ -7,6 +7,7 @@ public enum PrimitiveTypes { RANDOM, FILE, MAP, - BOOLEAN + BOOLEAN, + IMAGES } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/DiscrepancyAnalysis.java b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/DiscrepancyAnalysis.java index 7580eeb..9e959bd 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/DiscrepancyAnalysis.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/DiscrepancyAnalysis.java @@ -7,7 +7,6 @@ import java.util.Map; import org.gcube.contentmanagement.graphtools.utils.MathFunctions; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType; import org.gcube.dataanalysis.ecoengine.datatypes.InputTable; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; @@ -19,7 +18,6 @@ import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; import org.gcube.dataanalysis.ecoengine.utils.Operations; -import org.hibernate.SessionFactory; public class DiscrepancyAnalysis extends DataAnalysis { @@ -37,7 +35,7 @@ public class DiscrepancyAnalysis extends DataAnalysis { float threshold = 0.1f; String configPath = "./cfg/"; - SessionFactory connection; + List errors; double mean; double variance; @@ -84,16 +82,8 @@ public class DiscrepancyAnalysis extends DataAnalysis { } - @Override - public void init(AlgorithmConfiguration config) throws Exception { - AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile); - // init db connection - connection = AlgorithmConfiguration.getConnectionFromConfig(config); - } - - @Override - public HashMap analyze(AlgorithmConfiguration config) throws Exception { + public HashMap analyze() throws Exception { String FirstTableCsquareColumn = config.getParam("FirstTableCsquareColumn"); String SecondTableCsquareColumn = config.getParam("SecondTableCsquareColumn"); @@ -148,17 +138,10 @@ public class DiscrepancyAnalysis extends DataAnalysis { output.put("ACCURACY", "" + accuracy); output.put("MAXIMUM_ERROR", "" + maxerror); output.put("MAXIMUM_ERROR_POINT", "" + maxdiscrepancyPoint); - + return output; - } - public void end() { - try { - connection.close(); - } catch (Exception e) { - } - } void calcDiscrepancy() { double[] err = new double[errors.size()]; @@ -206,9 +189,8 @@ public class DiscrepancyAnalysis extends DataAnalysis { } - @Override - public StatisticalType getOutput() { - PrimitiveType p = new PrimitiveType(Map.class.getName(), output, PrimitiveTypes.MAP, 
"ErrorsAnalysis","Analysis of the discrepancies"); - return p; + @Override + public String getDescription() { + return "Discrepancy Analysis bewteen two HSPEC distributions"; } } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/DistributionQualityAnalysis.java b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/DistributionQualityAnalysis.java index d009c72..79d68ac 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/DistributionQualityAnalysis.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/DistributionQualityAnalysis.java @@ -17,7 +17,6 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.hibernate.SessionFactory; import com.rapidminer.example.Attribute; import com.rapidminer.example.Attributes; @@ -34,7 +33,6 @@ public class DistributionQualityAnalysis extends DataAnalysis { static String getProbabilititesQuery = "select count(*) as distribprob from %1$s as a join %2$s as b on a.%3$s=b.%4$s and b.%5$s %6$s %7$s"; static String getNumberOfElementsQuery = "select count(*) from %1$s"; - static String getValuesQuery = "select %5$s as distribprob from %1$s as a join %2$s as b on a.%3$s=b.%4$s"; float threshold = 0.1f; @@ -146,7 +144,7 @@ public class DistributionQualityAnalysis extends DataAnalysis { return points; } - public HashMap analyze(AlgorithmConfiguration config) throws Exception { + public HashMap analyze() throws Exception { try { acceptanceThreshold = Float.parseFloat(config.getParam("PositiveThreshold")); @@ -206,7 +204,6 @@ public class DistributionQualityAnalysis extends DataAnalysis { output.put("BESTTHRESHOLD", "" + bestThreshold); return output; - } public double calculateSensitivity(int TP, int FN) { @@ -280,21 +277,6 @@ public class DistributionQualityAnalysis extends DataAnalysis { } } - SessionFactory connection; - - public void init(AlgorithmConfiguration config) throws Exception { - AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile); - // init db connection - connection = AlgorithmConfiguration.getConnectionFromConfig(config); - } - - public void end() { - try { - connection.close(); - } catch (Exception e) { - } - } - public static void main(String[] args) { AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile); @@ -355,4 +337,9 @@ public class DistributionQualityAnalysis extends DataAnalysis { return p; } + @Override + public String getDescription() { + return "Calculates the ROC, AUC and Accuracy of a model"; + } + } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/HabitatRepresentativeness.java b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/HabitatRepresentativeness.java index 2a9fd22..c374a27 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/HabitatRepresentativeness.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/HabitatRepresentativeness.java @@ -197,7 +197,7 @@ public class HabitatRepresentativeness extends DataAnalysis { private double currentHRSScore; private double [] currentHRSVector; - public HashMap analyze(AlgorithmConfiguration config) throws Exception { + public HashMap analyze() throws Exception { try { status = 0; @@ -236,8 +236,6 @@ public class HabitatRepresentativeness extends DataAnalysis { 
AnalysisLogger.getLogger().error("ALERT: AN ERROR OCCURRED DURING HRS CALCULATION : " + e.getLocalizedMessage()); throw e; } finally { - connection.close(); - status = 100; AnalysisLogger.getLogger().trace("COMPUTATION FINISHED "); } } @@ -271,22 +269,6 @@ public class HabitatRepresentativeness extends DataAnalysis { } } - SessionFactory connection; - - public void init(AlgorithmConfiguration config) throws Exception { - AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile); - // init db connection - // connection = AlgorithmConfiguration.getConnectionFromConfig(config); - config.initRapidMiner(); - } - - public void end() { - try { - connection.close(); - } catch (Exception e) { - } - } - public static void main(String[] args) throws Exception { AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile); @@ -303,8 +285,10 @@ public class HabitatRepresentativeness extends DataAnalysis { // config.setParam("NegativeCasesTable", "absence_data_baskingshark2"); HabitatRepresentativeness hsrcalc = new HabitatRepresentativeness(); - hsrcalc.init(config); - HashMap output = hsrcalc.analyze(config); + hsrcalc.setConfiguration(config); + hsrcalc.init(); + + HashMap output = hsrcalc.analyze(); for (String param:output.keySet()){ System.out.println(param+":"+output.get(param)); } @@ -346,4 +330,9 @@ public class HabitatRepresentativeness extends DataAnalysis { return status==100f?status: Math.min((status+(float)(currentIterationStep+1)*innerstatus/(float)maxTests),99f); } + @Override + public String getDescription() { + return "Calculates the Habitat Representativeness Score for a set of Features"; + } + } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateAnalysis.java b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateAnalysis.java index 436047e..da9921f 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateAnalysis.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateAnalysis.java @@ -666,7 +666,7 @@ public class BioClimateAnalysis { } public int calcHighProbabilityCells(String hspec, double probabilty) throws Exception { - AnalysisLogger.getLogger().trace("Calculating High Prob Cells"); + AnalysisLogger.getLogger().trace("Calculating High Prob Cells: "+String.format(countHighProbabilityCells, hspec, probabilty)); List countage = DatabaseFactory.executeSQLQuery(String.format(countHighProbabilityCells, hspec, probabilty), referencedbConnection); int count = Integer.parseInt("" + countage.get(0)); AnalysisLogger.getLogger().trace("Calc High Prob Cells: " + count); @@ -745,7 +745,8 @@ public class BioClimateAnalysis { config.setParam("MaxSamples", "" + 30000); eval = EvaluatorsFactory.getEvaluators(config).get(0); - PrimitiveType output = (PrimitiveType) eval.process(config); + eval.compute(); + PrimitiveType output = (PrimitiveType) eval.getOutput(); HashMap out = (HashMap)output.getContent(); diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Clusterer.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Clusterer.java index 72d1fc0..75f23cc 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Clusterer.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Clusterer.java @@ -1,29 +1,6 @@ package org.gcube.dataanalysis.ecoengine.interfaces; -import java.util.List; -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; -import 
org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; -import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; - -public interface Clusterer { - - public INFRASTRUCTURE getInfrastructure(); - - public void init() throws Exception; - - public void setConfiguration(AlgorithmConfiguration config); - - public void shutdown(); - - public float getStatus(); - - public String getDescription(); - - public List getInputParameters(); - - public StatisticalType getOutput(); - - public void cluster() throws Exception; +public interface Clusterer extends ComputationalAgent{ + } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/ComputationalAgent.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/ComputationalAgent.java index 3d13aa0..f1004a9 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/ComputationalAgent.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/ComputationalAgent.java @@ -2,6 +2,7 @@ package org.gcube.dataanalysis.ecoengine.interfaces; import java.util.List; +import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; @@ -22,5 +23,14 @@ public interface ComputationalAgent { // gets the content of the model: e.g. Table indications etc. public StatisticalType getOutput(); + public void init() throws Exception; + + public void setConfiguration(AlgorithmConfiguration config); + + public void shutdown(); + + public String getDescription(); + + public void compute() throws Exception; } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/DataAnalysis.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/DataAnalysis.java index 8351167..000eb0d 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/DataAnalysis.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/DataAnalysis.java @@ -1,15 +1,16 @@ package org.gcube.dataanalysis.ecoengine.interfaces; import java.util.HashMap; -import java.util.List; import java.util.Map; +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory; +import org.hibernate.SessionFactory; /** * Implements a mono-thread data analysis process @@ -22,34 +23,10 @@ public abstract class DataAnalysis implements Evaluator{ protected ResourceFactory resourceManager; protected int processedRecords; protected float status; + protected AlgorithmConfiguration config; + protected SessionFactory connection; - /** - * establishes input parameters for this algorithm along with their type - */ - public abstract List getInputParameters(); - - - - /** - * Executed the core of the algorithm - * @param config - * @return - * @throws Exception - */ - public abstract HashMap analyze(AlgorithmConfiguration config) throws Exception; - - /** - * initializes the procedure e.g. 
connects to the database - * @param config - * @throws Exception - */ - public abstract void init(AlgorithmConfiguration config) throws Exception; - - /** - * ends the processing, e.g. closes connections - * @throws Exception - */ - public abstract void end(); + public abstract HashMap analyze() throws Exception; /** * Processing skeleton : init-analyze-end @@ -57,13 +34,13 @@ public abstract class DataAnalysis implements Evaluator{ * @return * @throws Exception */ - public StatisticalType process(AlgorithmConfiguration config) throws Exception{ + HashMap out; + public void compute() throws Exception{ status = 0; - HashMap out = new HashMap(); + out = new HashMap(); try{ - init(config); - out = analyze(config); - end(); + out = analyze(); + shutdown(); }catch(Exception e){ e.printStackTrace(); throw e; @@ -71,9 +48,16 @@ public abstract class DataAnalysis implements Evaluator{ finally{ status = 100; } - return new PrimitiveType(Map.class.getName(), out, PrimitiveTypes.MAP, "Analysis","Analysis Results"); + } + @Override + public StatisticalType getOutput() { + PrimitiveType p = new PrimitiveType(Map.class.getName(), out, PrimitiveTypes.MAP, "AnalysisResult","Analysis Values"); + return p; + } + + /** * calculates the number of processed records per unity of time: the timing is calculated internally by the resourceManager and used when the method is interrogated */ @@ -120,4 +104,23 @@ public abstract class DataAnalysis implements Evaluator{ } + public void init() throws Exception { + AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile); + // init db connection + connection = AlgorithmConfiguration.getConnectionFromConfig(config); + config.initRapidMiner(); + } + + + public void setConfiguration(AlgorithmConfiguration config) { + this.config = config; + } + + public void shutdown() { + try { + connection.close(); + } catch (Exception e) { + } + } + } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Evaluator.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Evaluator.java index 5c915ad..bf0d8d7 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Evaluator.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Evaluator.java @@ -1,18 +1,7 @@ package org.gcube.dataanalysis.ecoengine.interfaces; -import java.util.HashMap; - -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; public interface Evaluator extends ComputationalAgent{ - - public StatisticalType process(AlgorithmConfiguration config) throws Exception; - - public abstract void init(AlgorithmConfiguration config) throws Exception; - - public abstract void end(); - } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Generator.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Generator.java index 7a676d2..3d5afba 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Generator.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Generator.java @@ -8,17 +8,8 @@ public interface Generator extends ComputationalAgent{ public ALG_PROPS[] getSupportedAlgorithms(); - public INFRASTRUCTURE getInfrastructure(); - - public void init() throws Exception; - - public void setConfiguration(AlgorithmConfiguration config); - - public void shutdown(); - + public SpatialProbabilityDistribution getAlgorithm(); + public String getLoad(); - public void generate() throws Exception; - - public 
SpatialProbabilityDistribution getAlgorithm(); } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Model.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Model.java index c527ee7..2d02b5a 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Model.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Model.java @@ -7,12 +7,9 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; public interface Model { - - //defines the properties of this algorithm public ALG_PROPS[] getProperties(); - //defines the name of this model public String getName(); //gets the description of the model diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Modeler.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Modeler.java index 413af44..d3a81b4 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Modeler.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Modeler.java @@ -8,17 +8,10 @@ public interface Modeler extends ComputationalAgent{ public ALG_PROPS[] getSupportedModels(); - //gets the weight of the generator: according to this the generator will be placed in the execution order - public INFRASTRUCTURE getInfrastructure(); + public Model getModel(); public void setmodel(Model model); - public void model(AlgorithmConfiguration Input, Model previousModel); - - public void model(AlgorithmConfiguration Input); - - public void stop(); - - public Model getModel(); + public void model(Model previousModel); } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Transducerer.java b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Transducerer.java index 3779cee..bb708d7 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Transducerer.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/interfaces/Transducerer.java @@ -1,29 +1,6 @@ package org.gcube.dataanalysis.ecoengine.interfaces; -import java.util.List; -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; -import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; +public interface Transducerer extends ComputationalAgent{ -public interface Transducerer { - - public INFRASTRUCTURE getInfrastructure(); - - public void init() throws Exception; - - public void setConfiguration(AlgorithmConfiguration config); - - public void shutdown(); - - public float getStatus(); - - public String getDescription(); - - public List getInputParameters(); - - public StatisticalType getOutput(); - - public void transform() throws Exception; } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/modeling/SimpleModeler.java b/src/main/java/org/gcube/dataanalysis/ecoengine/modeling/SimpleModeler.java index e845765..380e7c7 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/modeling/SimpleModeler.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/modeling/SimpleModeler.java @@ -10,17 +10,18 @@ import org.gcube.dataanalysis.ecoengine.interfaces.Model; import org.gcube.dataanalysis.ecoengine.interfaces.Modeler; public class SimpleModeler implements Modeler{ - private Model innermodel; + protected Model innermodel; + protected AlgorithmConfiguration Input; @Override - public void model(AlgorithmConfiguration Input, Model 
previousModel) { + public void model(Model previousModel) { innermodel.init(Input, previousModel); innermodel.train(Input, previousModel); innermodel.postprocess(Input, previousModel); } @Override - public void model(AlgorithmConfiguration Input) { + public void compute() throws Exception{ innermodel.init(Input, null); innermodel.train(Input, null); innermodel.postprocess(Input, null); @@ -52,7 +53,7 @@ public class SimpleModeler implements Modeler{ } @Override - public void stop() { + public void shutdown() { innermodel.stop(); } @@ -76,4 +77,18 @@ public class SimpleModeler implements Modeler{ return innermodel.getOutput(); } + @Override + public void init() throws Exception { + } + + @Override + public void setConfiguration(AlgorithmConfiguration config) { + Input = config; + } + + @Override + public String getDescription() { + return "A Generic Modeler invoking training"; + } + } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSimpleSplitGenerator.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSimpleSplitGenerator.java index 13a6d1e..2d27d9f 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSimpleSplitGenerator.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSimpleSplitGenerator.java @@ -152,7 +152,7 @@ public class LocalSimpleSplitGenerator implements Generator { } @Override - public void generate() throws Exception { + public void compute() throws Exception { // INITIALIZATION long tstart = System.currentTimeMillis(); try { @@ -344,6 +344,11 @@ public class LocalSimpleSplitGenerator implements Generator { return distributionModel; } + @Override + public String getDescription() { + return "A generator which splits a distribution on different threads along the species dimension"; + } + } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSplitGenerator.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSplitGenerator.java index df61439..5359295 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSplitGenerator.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/LocalSplitGenerator.java @@ -236,7 +236,7 @@ public class LocalSplitGenerator implements Generator { } @Override - public void generate() throws Exception { + public void compute() throws Exception { // INITIALIZATION long tstart = System.currentTimeMillis(); try { @@ -513,5 +513,10 @@ public class LocalSplitGenerator implements Generator { return distributionModel; } + @Override + public String getDescription() { + return "A generator based on tabular data production, which splits a distribution on different threads along the species dimension"; + } + } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/GeneratorsFactory.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/GeneratorsFactory.java index c8114b4..e687cf0 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/GeneratorsFactory.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/GeneratorsFactory.java @@ -107,6 +107,7 @@ public class GeneratorsFactory { //investigate on possible suitable generators for (Object generatorName:pg.values()){ Generator gen = (Generator)Class.forName((String)generatorName).newInstance(); + gen.setConfiguration(config); ALG_PROPS[] supportedAlgs = gen.getSupportedAlgorithms(); boolean genSuitable = false; for (ALG_PROPS prop:algp){ diff --git 
a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ModelersFactory.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ModelersFactory.java index e156237..9c0387b 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ModelersFactory.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ModelersFactory.java @@ -52,6 +52,7 @@ public static List getModelers(AlgorithmConfiguration config) throws Ex //if the algorithm is a generator itself then execute it if (algclass instanceof Modeler){ Modeler g = (Modeler) algclass; + g.setConfiguration(config); modelers.add(g); } else @@ -64,6 +65,7 @@ public static List getModelers(AlgorithmConfiguration config) throws Ex //investigate on possible suitable modelers for (Object modelerName:pg.values()){ Modeler gen = (Modeler)Class.forName((String)modelerName).newInstance(); + gen.setConfiguration(config); ALG_PROPS[] supportedAlgs = gen.getSupportedModels(); boolean genSuitable = false; for (ALG_PROPS prop:algp){ diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ProcessorsFactory.java b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ProcessorsFactory.java index d09f87f..51b4acc 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ProcessorsFactory.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/processing/factories/ProcessorsFactory.java @@ -82,14 +82,16 @@ public class ProcessorsFactory { if (algclass instanceof Generator) { Generator g = (Generator) algclass; g.setConfiguration(config); - g.init(); + g.setConfiguration(config); return g; } else if (algclass instanceof Modeler) { Modeler m = (Modeler) algclass; + m.setConfiguration(config); return m; } else if (algclass instanceof Evaluator) { Evaluator m = (Evaluator) algclass; + m.setConfiguration(config); return m; } else if (algclass instanceof Clusterer) { diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/TestsMetaInfo.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/TestsMetaInfo.java index 0e93a44..d4a59e2 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/TestsMetaInfo.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/TestsMetaInfo.java @@ -3,7 +3,6 @@ package org.gcube.dataanalysis.ecoengine.test; import java.util.HashMap; import java.util.List; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer; @@ -98,47 +97,6 @@ public static void main(String[] args) throws Exception { } - private static void generate(Generator generator) throws Exception { - - if (generator != null) { - TestsMetaInfo tgs = new TestsMetaInfo(); - ThreadCalculator tc = tgs.new ThreadCalculator(generator); - Thread t = new Thread(tc); - t.start(); - while (generator.getStatus() < 100) { - - String resLoad = generator.getResourceLoad(); - String ress = generator.getResources(); - String species = generator.getLoad(); - System.out.println("LOAD: " + resLoad); - System.out.println("RESOURCES: " + ress); - System.out.println("SPECIES: " + species); - System.out.println("STATUS: " + generator.getStatus()); - Thread.sleep(1000); - } - } else - AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported"); - - } - - public class ThreadCalculator implements Runnable { - 
Generator dg; - - public ThreadCalculator(Generator dg) { - this.dg = dg; - } - - public void run() { - try { - - dg.generate(); - - } catch (Exception e) { - } - } - - } - private static AlgorithmConfiguration testConfigTrans() { AlgorithmConfiguration config = new AlgorithmConfiguration(); config.setConfigPath("./cfg/"); diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/RegressionComplexGeneration.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/RegressionComplexGeneration.java similarity index 95% rename from src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/RegressionComplexGeneration.java rename to src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/RegressionComplexGeneration.java index eff00ac..8afd8e6 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/RegressionComplexGeneration.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/RegressionComplexGeneration.java @@ -1,4 +1,4 @@ -package org.gcube.dataanalysis.ecoengine.test.regressions; +package org.gcube.dataanalysis.ecoengine.test.checks; import java.util.HashMap; import java.util.List; @@ -66,7 +66,7 @@ public static void main(String[] args) throws Exception { public void run() { try { - dg.generate(); + dg.compute(); } catch (Exception e) { } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/RegressionSimpleGeneration.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/RegressionSimpleGeneration.java similarity index 93% rename from src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/RegressionSimpleGeneration.java rename to src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/RegressionSimpleGeneration.java index 2af7bfd..fd380f7 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/RegressionSimpleGeneration.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/RegressionSimpleGeneration.java @@ -1,4 +1,4 @@ -package org.gcube.dataanalysis.ecoengine.test.regressions; +package org.gcube.dataanalysis.ecoengine.test.checks; import java.util.List; @@ -63,7 +63,7 @@ public static void main(String[] args) throws Exception { public void run() { try { - dg.generate(); + dg.compute(); } catch (Exception e) { } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestBioClimateAnalysis.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestBioClimateAnalysis.java similarity index 94% rename from src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestBioClimateAnalysis.java rename to src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestBioClimateAnalysis.java index 867339d..8dea325 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestBioClimateAnalysis.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestBioClimateAnalysis.java @@ -1,4 +1,4 @@ -package org.gcube.dataanalysis.ecoengine.test.regressions; +package org.gcube.dataanalysis.ecoengine.test.checks; import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis; diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestClusterer.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestClusterer.java new file mode 100644 index 0000000..ba384c5 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestClusterer.java @@ -0,0 +1,82 @@ +package org.gcube.dataanalysis.ecoengine.test.checks; + +import 
java.util.List; + +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.gcube.dataanalysis.ecoengine.clustering.DBScan; +import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer; +import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer; +import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory; + +public class TestClusterer { + /** + * example of parallel processing on a single machine the procedure will generate a new table for a distribution on suitable species + * + */ + + public static void main(String[] args) throws Exception { + + System.out.println("TEST 1"); + List clus = ClusterersFactory.getClusterers(testConfigLocal()); + clus .get(0).init(); + cluster(clus .get(0)); + clus = null; + + } + + private static void cluster(Clusterer clus) throws Exception { + + if (clus != null) { + TestClusterer tgs = new TestClusterer(); + ThreadCalculator tc = tgs.new ThreadCalculator(clus); + Thread t = new Thread(tc); + t.start(); + while (clus.getStatus() < 100) { + + System.out.println("STATUS: " + clus.getStatus()); + Thread.sleep(1000); + } + } else + AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported"); + + } + + public class ThreadCalculator implements Runnable { + Clusterer dg; + + public ThreadCalculator(Clusterer dg) { + this.dg = dg; + } + + public void run() { + try { + + dg.compute(); + + } catch (Exception e) { + } + } + + } + + private static AlgorithmConfiguration testConfigLocal() { + + AlgorithmConfiguration config = new AlgorithmConfiguration(); + config.setConfigPath("./cfg/"); + config.setPersistencePath("./"); + config.setParam("OccurrencePointsTable","presence_basking_cluster"); + config.setParam("FeaturesColumnNames","centerlat,centerlong"); + config.setParam("OccurrencePointsClusterTable","occCluster_2"); + config.setParam("epsilon","10"); + config.setParam("minPoints","1"); + + config.setConfigPath("./cfg/"); + config.setPersistencePath("./"); + config.setNumberOfResources(1); + config.setAgent("DBSCAN"); + + return config; + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestEvaluation.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestEvaluation.java similarity index 93% rename from src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestEvaluation.java rename to src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestEvaluation.java index 95fc0db..3b731f6 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestEvaluation.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestEvaluation.java @@ -1,4 +1,4 @@ -package org.gcube.dataanalysis.ecoengine.test.regressions; +package org.gcube.dataanalysis.ecoengine.test.checks; import java.util.HashMap; import java.util.List; @@ -74,7 +74,8 @@ public static void main(String[] args) throws Exception { public void run() { try { - PrimitiveType output = (PrimitiveType) dg.process(config); + dg.compute(); + PrimitiveType output = (PrimitiveType) dg.getOutput(); HashMap out = (HashMap)output.getContent(); DiscrepancyAnalysis.visualizeResults(out); diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisDev.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestHSPECBioClimateAnalysisDev.java similarity index 92% rename from 
src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisDev.java rename to src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestHSPECBioClimateAnalysisDev.java index ab119db..5377e42 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisDev.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestHSPECBioClimateAnalysisDev.java @@ -1,4 +1,4 @@ -package org.gcube.dataanalysis.ecoengine.test.regressions; +package org.gcube.dataanalysis.ecoengine.test.checks; import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis; diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisProd.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestHSPECBioClimateAnalysisProd.java similarity index 93% rename from src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisProd.java rename to src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestHSPECBioClimateAnalysisProd.java index a4bf645..159df7c 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestHSPECBioClimateAnalysisProd.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestHSPECBioClimateAnalysisProd.java @@ -1,4 +1,4 @@ -package org.gcube.dataanalysis.ecoengine.test.regressions; +package org.gcube.dataanalysis.ecoengine.test.checks; import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis; diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestSingleHSPECGeneration.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestSingleHSPECGeneration.java similarity index 93% rename from src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestSingleHSPECGeneration.java rename to src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestSingleHSPECGeneration.java index 82938bb..d83bb60 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestSingleHSPECGeneration.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestSingleHSPECGeneration.java @@ -1,4 +1,4 @@ -package org.gcube.dataanalysis.ecoengine.test.regressions; +package org.gcube.dataanalysis.ecoengine.test.checks; import java.util.HashMap; import java.util.List; @@ -57,7 +57,7 @@ public static void main(String[] args) throws Exception { public void run() { try { - dg.generate(); + dg.compute(); } catch (Exception e) { } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestTransducer.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestTransducer.java new file mode 100644 index 0000000..935092f --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestTransducer.java @@ -0,0 +1,84 @@ +package org.gcube.dataanalysis.ecoengine.test.checks; + +import java.util.List; + +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer; +import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory; + +public class TestTransducer { + /** + * example of parallel processing on a single machine the procedure will generate a new table for a distribution on suitable species + * + */ + + public static void main(String[] args) throws Exception { + + System.out.println("TEST 1"); + 
List trans = TransducerersFactory.getTransducerers(testConfigLocal()); + trans.get(0).init(); + transduce(trans.get(0)); + trans = null; + + } + + private static void transduce(Transducerer trans) throws Exception { + + if (trans != null) { + TestTransducer tgs = new TestTransducer(); + ThreadCalculator tc = tgs.new ThreadCalculator(trans); + Thread t = new Thread(tc); + t.start(); + while (trans.getStatus() < 100) { + + System.out.println("STATUS: " + trans.getStatus()); + Thread.sleep(1000); + } + } else + AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported"); + + } + + public class ThreadCalculator implements Runnable { + Transducerer dg; + + public ThreadCalculator(Transducerer dg) { + this.dg = dg; + } + + public void run() { + try { + + dg.compute(); + + } catch (Exception e) { + } + } + + } + + private static AlgorithmConfiguration testConfigLocal() { + + AlgorithmConfiguration config = new AlgorithmConfiguration(); + config.setConfigPath("./cfg/"); + config.setPersistencePath("./"); + config.setNumberOfResources(1); + config.setAgent("BIOCLIMATE_HSPEC"); + + //config.setParam("HSPEC_TABLE_LIST", "hcaf_d, hcaf_d_2015_LINEAR_01338580273835,hcaf_d_2018_LINEAR_11338580276548,hcaf_d_2021_LINEAR_21338580279237,hcaf_d_2024_LINEAR_31338580282780,hcaf_d_2027_LINEAR_41338580283400,hcaf_d_2030_LINEAR_51338580284030,hcaf_d_2033_LINEAR_61338580284663,hcaf_d_2036_LINEAR_71338580285205,hcaf_d_2039_LINEAR_81338580285958,hcaf_d_2042_LINEAR_91338580286545,hcaf_d_2050"); + //config.setParam("HSPEC_TABLE_NAMES", "test,test,test,test,test,test,test,test,test,test,test,test"); + config.setParam("HSPEC_TABLE_LIST", "hspec_validation"+AlgorithmConfiguration.getListSeparator()+"hspec_validation2"); + config.setParam("HSPEC_TABLE_NAMES", "test"+AlgorithmConfiguration.getListSeparator()+"test"); + + config.setParam("Threshold", "0.5"); + + config.setParam("DatabaseUserName", "gcube"); + config.setParam("DatabasePassword", "d4science2"); + config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb"); + config.setParam("DatabaseDriver", "org.postgresql.Driver"); + + return config; + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestsDUMMYGeneration.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestsDUMMYGeneration.java similarity index 92% rename from src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestsDUMMYGeneration.java rename to src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestsDUMMYGeneration.java index 3f122ac..f26b269 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestsDUMMYGeneration.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestsDUMMYGeneration.java @@ -1,4 +1,4 @@ -package org.gcube.dataanalysis.ecoengine.test.regressions; +package org.gcube.dataanalysis.ecoengine.test.checks; import java.util.List; @@ -56,7 +56,7 @@ public static void main(String[] args) throws Exception { public void run() { try { - dg.generate(); + dg.compute(); } catch (Exception e) { } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestsHSPENTraining.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestsHSPENTraining.java similarity index 93% rename from src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestsHSPENTraining.java rename to src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestsHSPENTraining.java index 5bd4ee8..001af03 100644 --- 
a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestsHSPENTraining.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestsHSPENTraining.java @@ -1,4 +1,4 @@ -package org.gcube.dataanalysis.ecoengine.test.regressions; +package org.gcube.dataanalysis.ecoengine.test.checks; import java.util.List; @@ -56,7 +56,7 @@ public static void main(String[] args) throws Exception { public void run() { try { - dg.model(config, null); + dg.model(null); } catch (Exception e) { } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestsTESTGeneration.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestsTESTGeneration.java similarity index 92% rename from src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestsTESTGeneration.java rename to src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestsTESTGeneration.java index 861cc3b..9bbc9f9 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regressions/TestsTESTGeneration.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/checks/TestsTESTGeneration.java @@ -1,4 +1,4 @@ -package org.gcube.dataanalysis.ecoengine.test.regressions; +package org.gcube.dataanalysis.ecoengine.test.checks; import java.util.List; @@ -56,7 +56,7 @@ public static void main(String[] args) throws Exception { public void run() { try { - dg.generate(); + dg.compute(); } catch (Exception e) { } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestClusterers.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestClusterers.java new file mode 100644 index 0000000..bb8140a --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestClusterers.java @@ -0,0 +1,40 @@ +package org.gcube.dataanalysis.ecoengine.test.regression; + +import java.util.List; + +import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer; +import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory; + +public class RegressionTestClusterers { + /** + * example of parallel processing on a single machine the procedure will generate a new table for a distribution on suitable species + * + */ + +public static void main(String[] args) throws Exception { + + System.out.println("TEST 1"); + + List clus = ClusterersFactory.getClusterers(testConfigLocal()); + clus.get(0).init(); + Regressor.process(clus.get(0)); + clus = null; + +} + + + private static AlgorithmConfiguration testConfigLocal() { + + AlgorithmConfiguration config = Regressor.getConfig(); + config.setNumberOfResources(1); + config.setAgent("DBSCAN"); + config.setParam("OccurrencePointsTable","presence_basking_cluster"); + config.setParam("FeaturesColumnNames","centerlat,centerlong"); + config.setParam("OccurrencePointsClusterTable","occCluster_2"); + config.setParam("epsilon","10"); + config.setParam("minPoints","1"); + + return config; + } +} diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestEvaluators.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestEvaluators.java new file mode 100644 index 0000000..e70c54e --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestEvaluators.java @@ -0,0 +1,69 @@ +package org.gcube.dataanalysis.ecoengine.test.regression; + +import java.util.List; + +import 
org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator; +import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory; + +public class RegressionTestEvaluators { + /** + * example of parallel processing on a single machine the procedure will generate a new table for a distribution on suitable species + * + */ + + public static void main(String[] args) throws Exception { + + List evaluators = EvaluatorsFactory.getEvaluators(testConfig1()); + evaluators.get(0).init(); + Regressor.process(evaluators.get(0)); + evaluators = null; + + System.out.println("\n**********-------************\n"); + + //test Discrepancy + evaluators = EvaluatorsFactory.getEvaluators(testConfig2()); + evaluators.get(0).init(); + Regressor.process(evaluators.get(0)); + evaluators = null; + + + } + + private static AlgorithmConfiguration testConfig1() { + + AlgorithmConfiguration config = Regressor.getConfig(); + config.setNumberOfResources(1); + config.setAgent("DISCREPANCY_ANALYSIS"); + config.setParam("FirstTable", "hspec_native_baskingshark_aquamaps"); + config.setParam("SecondTable", "hspec_suitable_nn_Fis22747"); + config.setParam("FirstTableCsquareColumn", "csquarecode"); + config.setParam("SecondTableCsquareColumn", "csquarecode"); + config.setParam("FirstTableProbabilityColumn", "probability"); + config.setParam("SecondTableProbabilityColumn", "probability"); + config.setParam("ComparisonThreshold", "0.1"); + + return config; + } + + private static AlgorithmConfiguration testConfig2() { + + AlgorithmConfiguration config = Regressor.getConfig(); + config.setNumberOfResources(1); + config.setNumberOfResources(1); + config.setAgent("QUALITY_ANALYSIS"); + + config.setParam("PositiveCasesTable", "presence_data_baskingshark"); + config.setParam("NegativeCasesTable", "absence_data_baskingshark2"); + config.setParam("PositiveCasesTableKeyColumn", "csquarecode"); + config.setParam("NegativeCasesTableKeyColumn", "csquarecode"); + config.setParam("DistributionTable", "hspec_native_baskingshark_aquamaps"); + config.setParam("DistributionTableKeyColumn", "csquarecode"); + config.setParam("DistributionTableProbabilityColumn", "probability"); + config.setParam("PositiveThreshold", "0.5"); + config.setParam("NegativeThreshold", "0.5"); + return config; + + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestGenerators.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestGenerators.java new file mode 100644 index 0000000..4e4d545 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestGenerators.java @@ -0,0 +1,41 @@ +package org.gcube.dataanalysis.ecoengine.test.regression; + +import java.util.List; + +import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.interfaces.Generator; +import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory; + +public class RegressionTestGenerators { + /** + * example of parallel processing on a single machine the procedure will generate a new table for a distribution on suitable species + * + */ + +public static void main(String[] args) throws Exception { + + System.out.println("TEST 1"); + + List generators = GeneratorsFactory.getGenerators(testConfigLocal()); + generators.get(0).init(); + Regressor.process(generators.get(0)); + generators = null; + +} + + + private static AlgorithmConfiguration 
testConfigLocal() { + + AlgorithmConfiguration config = Regressor.getConfig(); + config.setNumberOfResources(5); + config.setModel("AQUAMAPS_SUITABLE"); + + config.setParam("DistributionTable","hspec_suitable_test_gp"); + config.setParam("CsquarecodesTable","hcaf_d"); + config.setParam("EnvelopeTable","hspen_micro"); + config.setParam("PreprocessedTable", "maxminlat_hspen"); + config.setParam("CreateTable","true"); + + return config; + } +} diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestModelers.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestModelers.java new file mode 100644 index 0000000..a725d71 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestModelers.java @@ -0,0 +1,43 @@ +package org.gcube.dataanalysis.ecoengine.test.regression; + +import java.util.List; + +import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.interfaces.Generator; +import org.gcube.dataanalysis.ecoengine.interfaces.Modeler; +import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory; +import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory; + +public class RegressionTestModelers { + /** + * example of parallel processing on a single machine: the procedure trains an HSPEN model and writes the resulting envelopes to a new table + * + */ + +public static void main(String[] args) throws Exception { + + System.out.println("TEST 1"); + + List modelers = ModelersFactory.getModelers(testConfigLocal()); + modelers.get(0).init(); + Regressor.process(modelers.get(0)); + modelers = null; + +} + + + private static AlgorithmConfiguration testConfigLocal() { + + AlgorithmConfiguration config = Regressor.getConfig(); + config.setNumberOfResources(2); + config.setModel("HSPEN"); + + config.setParam("OuputEnvelopeTable","hspen_trained"); + config.setParam("OccurrenceCellsTable","occurrencecells"); + config.setParam("EnvelopeTable","hspen_mini"); + config.setParam("CsquarecodesTable", "hcaf_d"); + config.setParam("CreateTable","true"); + + return config; + } +} diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestTransducers.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestTransducers.java new file mode 100644 index 0000000..fb10f5d --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/RegressionTestTransducers.java @@ -0,0 +1,39 @@ +package org.gcube.dataanalysis.ecoengine.test.regression; + +import java.util.List; + +import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer; +import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer; +import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory; +import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory; + +public class RegressionTestTransducers { + /** + * example of processing on a single machine: the procedure runs the BIOCLIMATE_HSPEC transducer on a list of HSPEC tables + * + */ + +public static void main(String[] args) throws Exception { + + System.out.println("TEST 1"); + + List trans = TransducerersFactory.getTransducerers(testConfigLocal()); + trans.get(0).init(); + Regressor.process(trans.get(0)); + trans = null; + +} + + + private static AlgorithmConfiguration testConfigLocal() { + + AlgorithmConfiguration config 
= Regressor.getConfig(); + config.setAgent("BIOCLIMATE_HSPEC"); + config.setParam("HSPEC_TABLE_LIST", "hspec_validation"+AlgorithmConfiguration.getListSeparator()+"hspec_validation2"); + config.setParam("HSPEC_TABLE_NAMES", "test"+AlgorithmConfiguration.getListSeparator()+"test"); + config.setParam("Threshold", "0.5"); + + return config; + } +} diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/Regressor.java b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/Regressor.java new file mode 100644 index 0000000..e2f3907 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/test/regression/Regressor.java @@ -0,0 +1,61 @@ +package org.gcube.dataanalysis.ecoengine.test.regression; + +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent; + +public class Regressor { + + + public static void process(ComputationalAgent agent) throws Exception { + + if (agent != null) { + Regressor tgs = new Regressor(); + ThreadCalculator tc = tgs.new ThreadCalculator(agent); + Thread t = new Thread(tc); + t.start(); + while (agent.getStatus() < 100) { + + String resLoad = agent.getResourceLoad(); + String ress = agent.getResources(); + System.out.println("LOAD: " + resLoad); + System.out.println("RESOURCES: " + ress); + System.out.println("STATUS: " + agent.getStatus()); + Thread.sleep(1000); + } + } else + AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported"); + + } + + public class ThreadCalculator implements Runnable { + ComputationalAgent dg; + + public ThreadCalculator(ComputationalAgent dg) { + this.dg = dg; + } + + public void run() { + try { + + dg.compute(); + + } catch (Exception e) { + } + } + + } + + public static AlgorithmConfiguration getConfig() { + + AlgorithmConfiguration config = new AlgorithmConfiguration(); + config.setConfigPath("./cfg/"); + config.setPersistencePath("./"); + config.setParam("DatabaseUserName","gcube"); + config.setParam("DatabasePassword","d4science2"); + config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb"); + config.setParam("DatabaseDriver","org.postgresql.Driver"); + + return config; + } +} diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/transducers/BioClimateHSPECTransducer.java b/src/main/java/org/gcube/dataanalysis/ecoengine/transducers/BioClimateHSPECTransducer.java index 8f92e9d..d1b2f32 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/transducers/BioClimateHSPECTransducer.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/transducers/BioClimateHSPECTransducer.java @@ -17,6 +17,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes; import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates; import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis; import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer; +import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory; public class BioClimateHSPECTransducer implements Transducerer{ @@ -37,7 +38,7 @@ public class BioClimateHSPECTransducer implements Transducerer{ @Override public void init() throws Exception { //init the analyzer - bioClimate=new BioClimateAnalysis(config.getConfigPath(),"./",config.getParam("DatabaseURL"),config.getParam("DatabaseUserName"), config.getParam("DatabasePassword"), true); + bioClimate=new 
BioClimateAnalysis(config.getConfigPath(),"./",config.getParam("DatabaseURL"),config.getParam("DatabaseUserName"), config.getParam("DatabasePassword"), false); //build the hspec names: hspecTables = config.getParam("HSPEC_TABLE_LIST").split(AlgorithmConfiguration.getListSeparator()); hspecTablesNames = config.getParam("HSPEC_TABLE_NAMES").split(AlgorithmConfiguration.getListSeparator()); @@ -108,7 +109,7 @@ public class BioClimateHSPECTransducer implements Transducerer{ } @Override - public void transform() throws Exception { + public void compute() throws Exception { status = 0.1f; try{ @@ -124,4 +125,18 @@ public class BioClimateHSPECTransducer implements Transducerer{ } } + + ResourceFactory resourceManager; + public String getResourceLoad() { + if (resourceManager==null) + resourceManager = new ResourceFactory(); + return resourceManager.getResourceLoad(1); + } + + + @Override + public String getResources() { + return ResourceFactory.getResources(100f); + } + } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/transducers/TestTrans.java b/src/main/java/org/gcube/dataanalysis/ecoengine/transducers/TestTrans.java index 66da393..a8ee71d 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/transducers/TestTrans.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/transducers/TestTrans.java @@ -6,9 +6,12 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType; import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer; +import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory; public class TestTrans implements Transducerer{ - + + float status = 0; + @Override public INFRASTRUCTURE getInfrastructure() { // TODO Auto-generated method stub @@ -35,14 +38,12 @@ public class TestTrans implements Transducerer{ @Override public float getStatus() { - // TODO Auto-generated method stub - return 0; + return status; } @Override public String getDescription() { - // TODO Auto-generated method stub - return null; + return ""; } @Override @@ -58,9 +59,20 @@ public class TestTrans implements Transducerer{ } @Override - public void transform() throws Exception { + public void compute() throws Exception { // TODO Auto-generated method stub } + ResourceFactory resourceManager; + public String getResourceLoad() { + if (resourceManager==null) + resourceManager = new ResourceFactory(); + return resourceManager.getResourceLoad(1); + } + + @Override + public String getResources() { + return ResourceFactory.getResources(100f); + } } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/user/EvaluatorT.java b/src/main/java/org/gcube/dataanalysis/ecoengine/user/EvaluatorT.java index 074df5c..f8881ce 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/user/EvaluatorT.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/user/EvaluatorT.java @@ -17,8 +17,8 @@ public class EvaluatorT implements Runnable{ public void run() { try { - - PrimitiveType output = (PrimitiveType) dg.process(config); + dg.compute(); + PrimitiveType output = (PrimitiveType) dg.getOutput(); HashMap out = (HashMap)output.getContent(); DiscrepancyAnalysis.visualizeResults(out); diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/user/GeneratorT.java b/src/main/java/org/gcube/dataanalysis/ecoengine/user/GeneratorT.java index 63faff9..dfc1a16 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/user/GeneratorT.java +++ 
b/src/main/java/org/gcube/dataanalysis/ecoengine/user/GeneratorT.java @@ -16,7 +16,7 @@ public class GeneratorT implements Runnable{ public void run() { try { - dg.generate(); + dg.compute(); } catch (Exception e) { } diff --git a/src/main/java/org/gcube/dataanalysis/ecoengine/user/ModelerT.java b/src/main/java/org/gcube/dataanalysis/ecoengine/user/ModelerT.java index d53fa6d..7775bac 100644 --- a/src/main/java/org/gcube/dataanalysis/ecoengine/user/ModelerT.java +++ b/src/main/java/org/gcube/dataanalysis/ecoengine/user/ModelerT.java @@ -19,7 +19,7 @@ public class ModelerT implements Runnable { public void run() { try { - dg.model(config, null); + dg.model(null); } catch (Exception e) { }