Gianpaolo Coro 2012-08-30 15:17:02 +00:00
parent 525803876b
commit 9809d3df1e
53 changed files with 787 additions and 293 deletions

View File

@ -1,2 +1,3 @@
DISCREPANCY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis
QUALITY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DistributionQualityAnalysis
HRS=org.gcube.dataanalysis.ecoengine.evaluation.HabitatRepresentativeness

View File

@ -1 +1,2 @@
TESTTRANS=org.gcube.dataanalysis.ecoengine.transducers.TestTrans
BIOCLIMATE_HSPEC=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHSPECTransducer

View File

@ -1,11 +1,9 @@
package org.gcube.dataanalysis.ecoengine.clustering;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
@ -21,9 +19,9 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.gcube.dataanalysis.ecoengine.utils.Transformations;
import org.hibernate.SessionFactory;
import org.postgresql.core.Utils;
import com.rapidminer.example.Attribute;
import com.rapidminer.example.Attributes;
@ -91,7 +89,7 @@ public class DBScan implements Clusterer{
config.setConfigPath("./cfg/");
config.initRapidMiner();
dbscanner.setConfiguration(config);
dbscanner.cluster();
dbscanner.compute();
}
@ -125,7 +123,7 @@ public class DBScan implements Clusterer{
config.initRapidMiner();
long t0 = System.currentTimeMillis();
dbscanner.setConfiguration(config);
dbscanner.cluster();
dbscanner.compute();
System.out.println("ELAPSED "+(System.currentTimeMillis()-t0));
}
@ -150,7 +148,7 @@ public class DBScan implements Clusterer{
DBScan dbscanner = new DBScan();
dbscanner.setConfiguration(config);
dbscanner.init();
dbscanner.cluster();
dbscanner.compute();
System.out.println("ELAPSED "+(System.currentTimeMillis()-t0));
@ -258,7 +256,7 @@ public class DBScan implements Clusterer{
@Override
public void cluster() throws Exception {
public void compute() throws Exception {
if ((config==null)||epsilon==null||minPoints==null||points==null){
throw new Exception("DBScan: Error incomplete parameters");
@ -414,6 +412,21 @@ public class DBScan implements Clusterer{
return "Clustering with DBScan";
}
ResourceFactory resourceManager;
public String getResourceLoad() {
if (resourceManager==null)
resourceManager = new ResourceFactory();
return resourceManager.getResourceLoad(1);
}
@Override
public String getResources() {
return ResourceFactory.getResources(100f);
}
}
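Note: the entry point is renamed from cluster() to compute(), aligning DBScan with the shared agent lifecycle, and resource reporting is added. A minimal driving sketch, mirroring the test mains above (the occurrence/feature table parameters from TestClusterer further below are assumed to be set as well):

AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setParam("epsilon", "10");
config.setParam("minPoints", "1");
// ... plus the table parameters shown in TestClusterer below

DBScan dbscanner = new DBScan();
dbscanner.setConfiguration(config);
dbscanner.init();
dbscanner.compute();                          // formerly dbscanner.cluster()
System.out.println(dbscanner.getResources()); // new resource reporting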

View File

@ -48,40 +48,27 @@ public class AlgorithmConfiguration extends LexicalEngineConfiguration implement
public static String RapidMinerOperatorsFile = "operators.xml";
public static String StatisticalManagerService = "StatisticalManager";
public static String StatisticalManagerClass = "Services";
public static String listSeparator="#";
public static String getListSeparator() {
return listSeparator;
}
public static void setListSeparator(String listSeparator) {
AlgorithmConfiguration.listSeparator = listSeparator;
}
public static int chunkSize = 100000;
public static int refreshResourcesTime = 10;
// database parameters
private String databaseDriver = "org.postgresql.Driver";
private String databaseURL = null;
private String databaseUserName = null;
private String databasePassword = null;
private String databaseDialect = null;
private String databaseIdleConnectionTestPeriod = null;
private String databaseAutomaticTestTable = null;
// Algorithm Parameters
private String configPath;
private String cachePath;
private String persistencePath;
private String distributionTable;
private String tableSpace;
private Boolean createTable = false;
private Boolean useDB = true;
private String envelopeTable;
private String csquarecodesTable;
private String occurrenceCellsTable;
private List<String> featuresTable;
private List<String> preprocessedTables;
private List<String> endpoints;
//service and remote
private String remoteCalculatorEndpoint;
private String serviceUserName;
private String remoteEnvironment;
private Integer numberOfResources = 0;
//modeling

View File

@ -13,5 +13,8 @@ public class ResourceLoad {
return "["+timestamp+", "+value+"]";
}
public static String defaultResourceLoad(){
long tk = System.currentTimeMillis();
return new ResourceLoad(tk, 1).toString();
}
}
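The new defaultResourceLoad() helper simply serializes a unit load stamped with the current time:

// Prints e.g. "[1346339822000, 1]" (the timestamp varies with the clock)
System.out.println(ResourceLoad.defaultResourceLoad());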

View File

@ -3,6 +3,8 @@ package org.gcube.dataanalysis.ecoengine.connectors.livemonitor;
import java.util.ArrayList;
import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
public class Resources {
public List<SingleResource> list;
@ -17,4 +19,24 @@ public class Resources {
}
public static String buildLocalResourcesLog(int nres){
Resources res = new Resources();
try {
for (int i = 0; i < nres; i++) {
try {
double value = 100.00;
res.addResource("Thread_" + (i + 1), value);
} catch (Exception e1) {
}
}
} catch (Exception e) {
e.printStackTrace();
}
if ((res != null) && (res.list != null))
return HttpRequest.toJSon(res.list).replace("resId", "resID");
else
return "";
}
}
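buildLocalResourcesLog(n) fabricates n fully loaded per-thread entries and serializes them; a usage sketch (the exact JSON layout comes from HttpRequest.toJSon — this method only renames the "resId" key to "resID"):

String json = Resources.buildLocalResourcesLog(2); // two synthetic 100% loads
System.out.println(json);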

View File

@ -0,0 +1,31 @@
package org.gcube.dataanalysis.ecoengine.datatypes;
import java.util.ArrayList;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
public class InputTablesList extends StatisticalType {
protected List<InputTable> list;
protected List<TableTemplates> templateNames;
public InputTablesList(List<TableTemplates> templateNames, String name, String description, boolean optional) {
super(name, description, optional);
list = new ArrayList<InputTable>();
this.templateNames=templateNames;
}
public void add(InputTable st){
list.add(st);
}
public List<InputTable> getList(){
return list;
}
public List<TableTemplates> getTemplates(){
return templateNames;
}
}
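A construction sketch for the new type, assuming the TableTemplates enum defines an HSPEC entry:

List<TableTemplates> templates = new ArrayList<TableTemplates>();
templates.add(TableTemplates.HSPEC);
InputTablesList hspecTables = new InputTablesList(templates,
        "HSPEC_TABLE_LIST", "HSPEC tables to compare", false);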

View File

@ -0,0 +1,33 @@
package org.gcube.dataanalysis.ecoengine.datatypes;
import java.util.ArrayList;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
public class PrimitiveTypesList extends StatisticalType {
protected List<PrimitiveType> list;
PrimitiveTypes type;
public PrimitiveTypesList(PrimitiveTypes type, String name, String description, boolean optional) {
super(name, description, optional);
list = new ArrayList<PrimitiveType>();
this.type = type;
}
public void add(PrimitiveType st){
list.add(st);
}
public List<PrimitiveType> getList(){
return list;
}
public PrimitiveTypes getTemplates(){
return type;
}
}
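An analogous construction sketch, assuming the PrimitiveTypes enum defines a STRING entry:

PrimitiveTypesList names = new PrimitiveTypesList(PrimitiveTypes.STRING,
        "HSPEC_TABLE_NAMES", "Labels for the HSPEC tables", false);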

View File

@ -30,6 +30,13 @@ public class StatisticalType {
}
public StatisticalType(String name, String description, boolean optional) {
this.name=name;
this.description=description;
this.defaultValue="";
this.optional=optional;
}
public String getDefaultValue() {
return defaultValue;
}

View File

@ -0,0 +1,24 @@
package org.gcube.dataanalysis.ecoengine.datatypes;
import java.util.ArrayList;
import java.util.List;
public class StatisticalTypeList <C extends StatisticalType> extends StatisticalType {
private List<C> list;
public StatisticalTypeList(String name, String description, boolean optional) {
super(name, description, optional);
list = new ArrayList<C>();
}
public void add(C st){
list.add(st);
}
public List<C> getList(){
return list;
}
}
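The generic variant constrains the element type at compile time; a sketch, using the PrimitiveType constructor as it appears elsewhere in this commit (STRING entry assumed):

StatisticalTypeList<PrimitiveType> outputs =
        new StatisticalTypeList<PrimitiveType>("Outputs", "Collected values", false);
outputs.add(new PrimitiveType(String.class.getName(), "done",
        PrimitiveTypes.STRING, "Status", "Completion flag"));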

View File

@ -7,6 +7,7 @@ public enum PrimitiveTypes {
RANDOM,
FILE,
MAP,
BOOLEAN
BOOLEAN,
IMAGES
}
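The new IMAGES entry lets agents declare image outputs alongside the MAP outputs used elsewhere in this commit; an illustrative, hypothetical declaration:

// The charts map is invented for illustration
HashMap<String, java.awt.Image> charts = new HashMap<String, java.awt.Image>();
PrimitiveType images = new PrimitiveType(HashMap.class.getName(), charts,
        PrimitiveTypes.IMAGES, "Charts", "Produced bioclimate charts");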

View File

@ -7,7 +7,6 @@ import java.util.Map;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
@ -19,7 +18,6 @@ import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.hibernate.SessionFactory;
public class DiscrepancyAnalysis extends DataAnalysis {
@ -37,7 +35,7 @@ public class DiscrepancyAnalysis extends DataAnalysis {
float threshold = 0.1f;
String configPath = "./cfg/";
SessionFactory connection;
List<Float> errors;
double mean;
double variance;
@ -84,16 +82,8 @@ public class DiscrepancyAnalysis extends DataAnalysis {
}
@Override
public void init(AlgorithmConfiguration config) throws Exception {
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// init db connection
connection = AlgorithmConfiguration.getConnectionFromConfig(config);
}
@Override
public HashMap<String, String> analyze(AlgorithmConfiguration config) throws Exception {
public HashMap<String, String> analyze() throws Exception {
String FirstTableCsquareColumn = config.getParam("FirstTableCsquareColumn");
String SecondTableCsquareColumn = config.getParam("SecondTableCsquareColumn");
@ -148,17 +138,10 @@ public class DiscrepancyAnalysis extends DataAnalysis {
output.put("ACCURACY", "" + accuracy);
output.put("MAXIMUM_ERROR", "" + maxerror);
output.put("MAXIMUM_ERROR_POINT", "" + maxdiscrepancyPoint);
return output;
}
public void end() {
try {
connection.close();
} catch (Exception e) {
}
}
void calcDiscrepancy() {
double[] err = new double[errors.size()];
@ -206,9 +189,8 @@ public class DiscrepancyAnalysis extends DataAnalysis {
}
@Override
public StatisticalType getOutput() {
PrimitiveType p = new PrimitiveType(Map.class.getName(), output, PrimitiveTypes.MAP, "ErrorsAnalysis","Analysis of the discrepancies");
return p;
}
@Override
public String getDescription() {
return "Discrepancy Analysis bewteen two HSPEC distributions";
}
}

View File

@ -17,7 +17,6 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory;
import com.rapidminer.example.Attribute;
import com.rapidminer.example.Attributes;
@ -34,7 +33,6 @@ public class DistributionQualityAnalysis extends DataAnalysis {
static String getProbabilititesQuery = "select count(*) as distribprob from %1$s as a join %2$s as b on a.%3$s=b.%4$s and b.%5$s %6$s %7$s";
static String getNumberOfElementsQuery = "select count(*) from %1$s";
static String getValuesQuery = "select %5$s as distribprob from %1$s as a join %2$s as b on a.%3$s=b.%4$s";
float threshold = 0.1f;
@ -146,7 +144,7 @@ public class DistributionQualityAnalysis extends DataAnalysis {
return points;
}
public HashMap<String, String> analyze(AlgorithmConfiguration config) throws Exception {
public HashMap<String, String> analyze() throws Exception {
try {
acceptanceThreshold = Float.parseFloat(config.getParam("PositiveThreshold"));
@ -206,7 +204,6 @@ public class DistributionQualityAnalysis extends DataAnalysis {
output.put("BESTTHRESHOLD", "" + bestThreshold);
return output;
}
public double calculateSensitivity(int TP, int FN) {
@ -280,21 +277,6 @@ public class DistributionQualityAnalysis extends DataAnalysis {
}
}
SessionFactory connection;
public void init(AlgorithmConfiguration config) throws Exception {
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// init db connection
connection = AlgorithmConfiguration.getConnectionFromConfig(config);
}
public void end() {
try {
connection.close();
} catch (Exception e) {
}
}
public static void main(String[] args) {
AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
@ -355,4 +337,9 @@ public class DistributionQualityAnalysis extends DataAnalysis {
return p;
}
}
@Override
public String getDescription() {
return "Calculates the ROC, AUC and Accuracy of a model";
}
}

View File

@ -197,7 +197,7 @@ public class HabitatRepresentativeness extends DataAnalysis {
private double currentHRSScore;
private double [] currentHRSVector;
public HashMap<String, String> analyze(AlgorithmConfiguration config) throws Exception {
public HashMap<String, String> analyze() throws Exception {
try {
status = 0;
@ -236,8 +236,6 @@ public class HabitatRepresentativeness extends DataAnalysis {
AnalysisLogger.getLogger().error("ALERT: AN ERROR OCCURRED DURING HRS CALCULATION : " + e.getLocalizedMessage());
throw e;
} finally {
connection.close();
status = 100;
AnalysisLogger.getLogger().trace("COMPUTATION FINISHED ");
}
}
@ -271,22 +269,6 @@ public class HabitatRepresentativeness extends DataAnalysis {
}
}
SessionFactory connection;
public void init(AlgorithmConfiguration config) throws Exception {
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// init db connection
// connection = AlgorithmConfiguration.getConnectionFromConfig(config);
config.initRapidMiner();
}
public void end() {
try {
connection.close();
} catch (Exception e) {
}
}
public static void main(String[] args) throws Exception {
AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
@ -303,8 +285,10 @@ public class HabitatRepresentativeness extends DataAnalysis {
// config.setParam("NegativeCasesTable", "absence_data_baskingshark2");
HabitatRepresentativeness hsrcalc = new HabitatRepresentativeness();
hsrcalc.init(config);
HashMap<String,String> output = hsrcalc.analyze(config);
hsrcalc.setConfiguration(config);
hsrcalc.init();
HashMap<String,String> output = hsrcalc.analyze();
for (String param:output.keySet()){
System.out.println(param+":"+output.get(param));
}
@ -346,4 +330,9 @@ public class HabitatRepresentativeness extends DataAnalysis {
return status==100f?status: Math.min((status+(float)(currentIterationStep+1)*innerstatus/(float)maxTests),99f);
}
@Override
public String getDescription() {
return "Calculates the Habitat Representativeness Score for a set of Features";
}
}

View File

@ -666,7 +666,7 @@ public class BioClimateAnalysis {
}
public int calcHighProbabilityCells(String hspec, double probabilty) throws Exception {
AnalysisLogger.getLogger().trace("Calculating High Prob Cells");
AnalysisLogger.getLogger().trace("Calculating High Prob Cells: "+String.format(countHighProbabilityCells, hspec, probabilty));
List<Object> countage = DatabaseFactory.executeSQLQuery(String.format(countHighProbabilityCells, hspec, probabilty), referencedbConnection);
int count = Integer.parseInt("" + countage.get(0));
AnalysisLogger.getLogger().trace("Calc High Prob Cells: " + count);
@ -745,7 +745,8 @@ public class BioClimateAnalysis {
config.setParam("MaxSamples", "" + 30000);
eval = EvaluatorsFactory.getEvaluators(config).get(0);
PrimitiveType output = (PrimitiveType) eval.process(config);
eval.compute();
PrimitiveType output = (PrimitiveType) eval.getOutput();
HashMap<String, String> out = (HashMap<String, String>)output.getContent();

View File

@ -1,29 +1,6 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
public interface Clusterer {
public INFRASTRUCTURE getInfrastructure();
public void init() throws Exception;
public void setConfiguration(AlgorithmConfiguration config);
public void shutdown();
public float getStatus();
public String getDescription();
public List<StatisticalType> getInputParameters();
public StatisticalType getOutput();
public void cluster() throws Exception;
public interface Clusterer extends ComputationalAgent{
}

View File

@ -2,6 +2,7 @@ package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
@ -22,5 +23,14 @@ public interface ComputationalAgent {
// gets the content of the model: e.g. Table indications etc.
public StatisticalType getOutput();
public void init() throws Exception;
public void setConfiguration(AlgorithmConfiguration config);
public void shutdown();
public String getDescription();
public void compute() throws Exception;
}
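With init, setConfiguration, shutdown, getDescription and compute pulled up into this interface, every agent family (Generator, Modeler, Evaluator, Clusterer, Transducerer) can be driven by one uniform loop; sketch:

ComputationalAgent agent = new DBScan();  // any implementation would do
agent.setConfiguration(config);           // config: an AlgorithmConfiguration
agent.init();
agent.compute();
StatisticalType result = agent.getOutput();
agent.shutdown();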

View File

@ -1,15 +1,16 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
import org.hibernate.SessionFactory;
/**
* Implements a single-threaded data analysis process
@ -22,34 +23,10 @@ public abstract class DataAnalysis implements Evaluator{
protected ResourceFactory resourceManager;
protected int processedRecords;
protected float status;
protected AlgorithmConfiguration config;
protected SessionFactory connection;
/**
* establishes input parameters for this algorithm along with their type
*/
public abstract List<StatisticalType> getInputParameters();
/**
* Executed the core of the algorithm
* @param config
* @return
* @throws Exception
*/
public abstract HashMap<String, String> analyze(AlgorithmConfiguration config) throws Exception;
/**
* initializes the procedure e.g. connects to the database
* @param config
* @throws Exception
*/
public abstract void init(AlgorithmConfiguration config) throws Exception;
/**
* ends the processing, e.g. closes connections
* @throws Exception
*/
public abstract void end();
public abstract HashMap<String, String> analyze() throws Exception;
/**
* Processing skeleton: init-analyze-shutdown
@ -57,13 +34,13 @@ public abstract class DataAnalysis implements Evaluator{
* @return
* @throws Exception
*/
public StatisticalType process(AlgorithmConfiguration config) throws Exception{
HashMap<String, String> out;
public void compute() throws Exception{
status = 0;
HashMap<String, String> out = new HashMap<String, String>();
out = new HashMap<String, String>();
try{
init(config);
out = analyze(config);
end();
out = analyze();
shutdown();
}catch(Exception e){
e.printStackTrace();
throw e;
@ -71,9 +48,16 @@ public abstract class DataAnalysis implements Evaluator{
finally{
status = 100;
}
return new PrimitiveType(Map.class.getName(), out, PrimitiveTypes.MAP, "Analysis","Analysis Results");
}
@Override
public StatisticalType getOutput() {
PrimitiveType p = new PrimitiveType(Map.class.getName(), out, PrimitiveTypes.MAP, "AnalysisResult","Analysis Values");
return p;
}
/**
* calculates the number of processed records per unit of time: the timing is calculated internally by the resourceManager and used when the method is queried
*/
@ -120,4 +104,23 @@ public abstract class DataAnalysis implements Evaluator{
}
public void init() throws Exception {
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// init db connection
connection = AlgorithmConfiguration.getConnectionFromConfig(config);
config.initRapidMiner();
}
public void setConfiguration(AlgorithmConfiguration config) {
this.config = config;
}
public void shutdown() {
try {
connection.close();
} catch (Exception e) {
}
}
}
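Concrete evaluators now inherit the whole lifecycle and only supply analyze() plus metadata. A minimal hypothetical subclass (the RowCounter name and "Table" parameter are invented for illustration; getStatus and the other agent methods are assumed inherited from DataAnalysis):

public class RowCounter extends DataAnalysis {
    @Override
    public HashMap<String, String> analyze() throws Exception {
        HashMap<String, String> out = new HashMap<String, String>();
        // the inherited init() has already opened the connection
        List<Object> rows = DatabaseFactory.executeSQLQuery(
                "select count(*) from " + config.getParam("Table"), connection);
        out.put("COUNT", "" + rows.get(0));
        return out;
    }
    @Override
    public List<StatisticalType> getInputParameters() {
        return null; // omitted in this sketch
    }
    @Override
    public String getDescription() {
        return "Counts the rows of a table";
    }
}

It would be driven via setConfiguration, init, compute and getOutput, exactly like the evaluators reworked above.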

View File

@ -1,18 +1,7 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
public interface Evaluator extends ComputationalAgent{
public StatisticalType process(AlgorithmConfiguration config) throws Exception;
public abstract void init(AlgorithmConfiguration config) throws Exception;
public abstract void end();
}

View File

@ -8,17 +8,8 @@ public interface Generator extends ComputationalAgent{
public ALG_PROPS[] getSupportedAlgorithms();
public INFRASTRUCTURE getInfrastructure();
public void init() throws Exception;
public void setConfiguration(AlgorithmConfiguration config);
public void shutdown();
public SpatialProbabilityDistribution getAlgorithm();
public String getLoad();
public void generate() throws Exception;
public SpatialProbabilityDistribution getAlgorithm();
}

View File

@ -7,12 +7,9 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
public interface Model {
//defines the properties of this algorithm
public ALG_PROPS[] getProperties();
//defines the name of this model
public String getName();
//gets the description of the model

View File

@ -8,17 +8,10 @@ public interface Modeler extends ComputationalAgent{
public ALG_PROPS[] getSupportedModels();
//gets the weight of the generator: according to this the generator will be placed in the execution order
public INFRASTRUCTURE getInfrastructure();
public Model getModel();
public void setmodel(Model model);
public void model(AlgorithmConfiguration Input, Model previousModel);
public void model(AlgorithmConfiguration Input);
public void stop();
public Model getModel();
public void model(Model previousModel);
}
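The narrowed contract injects configuration once and splits training into compute() (fresh) and model(previousModel) (resumed); usage sketch:

Modeler m = new SimpleModeler();
m.setConfiguration(config);   // replaces the AlgorithmConfiguration arguments
m.init();
m.compute();                  // was m.model(config)
// to resume from an earlier model:
// m.model(previousModel);    // was m.model(config, previousModel)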

View File

@ -1,29 +1,6 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
public interface Transducerer extends ComputationalAgent{
public interface Transducerer {
public INFRASTRUCTURE getInfrastructure();
public void init() throws Exception;
public void setConfiguration(AlgorithmConfiguration config);
public void shutdown();
public float getStatus();
public String getDescription();
public List<StatisticalType> getInputParameters();
public StatisticalType getOutput();
public void transform() throws Exception;
}

View File

@ -10,17 +10,18 @@ import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
public class SimpleModeler implements Modeler{
private Model innermodel;
protected Model innermodel;
protected AlgorithmConfiguration Input;
@Override
public void model(AlgorithmConfiguration Input, Model previousModel) {
public void model(Model previousModel) {
innermodel.init(Input, previousModel);
innermodel.train(Input, previousModel);
innermodel.postprocess(Input, previousModel);
}
@Override
public void model(AlgorithmConfiguration Input) {
public void compute() throws Exception{
innermodel.init(Input, null);
innermodel.train(Input, null);
innermodel.postprocess(Input, null);
@ -52,7 +53,7 @@ public class SimpleModeler implements Modeler{
}
@Override
public void stop() {
public void shutdown() {
innermodel.stop();
}
@ -76,4 +77,18 @@ public class SimpleModeler implements Modeler{
return innermodel.getOutput();
}
@Override
public void init() throws Exception {
}
@Override
public void setConfiguration(AlgorithmConfiguration config) {
Input = config;
}
@Override
public String getDescription() {
return "A Generic Modeler invoking training";
}
}

View File

@ -152,7 +152,7 @@ public class LocalSimpleSplitGenerator implements Generator {
}
@Override
public void generate() throws Exception {
public void compute() throws Exception {
// INITIALIZATION
long tstart = System.currentTimeMillis();
try {
@ -344,6 +344,11 @@ public class LocalSimpleSplitGenerator implements Generator {
return distributionModel;
}
@Override
public String getDescription() {
return "A generator which splits a distribution on different threads along the species dimension";
}
}

View File

@ -236,7 +236,7 @@ public class LocalSplitGenerator implements Generator {
}
@Override
public void generate() throws Exception {
public void compute() throws Exception {
// INITIALIZATION
long tstart = System.currentTimeMillis();
try {
@ -513,5 +513,10 @@ public class LocalSplitGenerator implements Generator {
return distributionModel;
}
@Override
public String getDescription() {
return "A generator based on tabular data production, which splits a distribution on different threads along the species dimension";
}
}

View File

@ -107,6 +107,7 @@ public class GeneratorsFactory {
//investigate on possible suitable generators
for (Object generatorName:pg.values()){
Generator gen = (Generator)Class.forName((String)generatorName).newInstance();
gen.setConfiguration(config);
ALG_PROPS[] supportedAlgs = gen.getSupportedAlgorithms();
boolean genSuitable = false;
for (ALG_PROPS prop:algp){

View File

@ -52,6 +52,7 @@ public static List<Modeler> getModelers(AlgorithmConfiguration config) throws Ex
//if the algorithm is a generator itself then execute it
if (algclass instanceof Modeler){
Modeler g = (Modeler) algclass;
g.setConfiguration(config);
modelers.add(g);
}
else
@ -64,6 +65,7 @@ public static List<Modeler> getModelers(AlgorithmConfiguration config) throws Ex
//investigate on possible suitable modelers
for (Object modelerName:pg.values()){
Modeler gen = (Modeler)Class.forName((String)modelerName).newInstance();
gen.setConfiguration(config);
ALG_PROPS[] supportedAlgs = gen.getSupportedModels();
boolean genSuitable = false;
for (ALG_PROPS prop:algp){

View File

@ -82,14 +82,16 @@ public class ProcessorsFactory {
if (algclass instanceof Generator) {
Generator g = (Generator) algclass;
g.setConfiguration(config);
g.init();
g.setConfiguration(config);
return g;
} else if (algclass instanceof Modeler) {
Modeler m = (Modeler) algclass;
m.setConfiguration(config);
return m;
}
else if (algclass instanceof Evaluator) {
Evaluator m = (Evaluator) algclass;
m.setConfiguration(config);
return m;
}
else if (algclass instanceof Clusterer) {

View File

@ -3,7 +3,6 @@ package org.gcube.dataanalysis.ecoengine.test;
import java.util.HashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
@ -98,47 +97,6 @@ public static void main(String[] args) throws Exception {
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
TestsMetaInfo tgs = new TestsMetaInfo();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.generate();
} catch (Exception e) {
}
}
}
private static AlgorithmConfiguration testConfigTrans() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.HashMap;
import java.util.List;
@ -66,7 +66,7 @@ public static void main(String[] args) throws Exception {
public void run() {
try {
dg.generate();
dg.compute();
} catch (Exception e) {
}

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.List;
@ -63,7 +63,7 @@ public static void main(String[] args) throws Exception {
public void run() {
try {
dg.generate();
dg.compute();
} catch (Exception e) {
}

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;
package org.gcube.dataanalysis.ecoengine.test.checks;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis;

View File

@ -0,0 +1,82 @@
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.clustering.DBScan;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory;
public class TestClusterer {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Clusterer> clus = ClusterersFactory.getClusterers(testConfigLocal());
clus.get(0).init();
cluster(clus.get(0));
clus = null;
}
private static void cluster(Clusterer clus) throws Exception {
if (clus != null) {
TestClusterer tgs = new TestClusterer();
ThreadCalculator tc = tgs.new ThreadCalculator(clus);
Thread t = new Thread(tc);
t.start();
while (clus.getStatus() < 100) {
System.out.println("STATUS: " + clus.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Clusterer dg;
public ThreadCalculator(Clusterer dg) {
this.dg = dg;
}
public void run() {
try {
dg.compute();
} catch (Exception e) {
}
}
}
private static AlgorithmConfiguration testConfigLocal() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("OccurrencePointsTable","presence_basking_cluster");
config.setParam("FeaturesColumnNames","centerlat,centerlong");
config.setParam("OccurrencePointsClusterTable","occCluster_2");
config.setParam("epsilon","10");
config.setParam("minPoints","1");
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(1);
config.setAgent("DBSCAN");
return config;
}
}

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.HashMap;
import java.util.List;
@ -74,7 +74,8 @@ public static void main(String[] args) throws Exception {
public void run() {
try {
PrimitiveType output = (PrimitiveType) dg.process(config);
dg.compute();
PrimitiveType output = (PrimitiveType) dg.getOutput();
HashMap<String, String> out = (HashMap<String, String>)output.getContent();
DiscrepancyAnalysis.visualizeResults(out);

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;
package org.gcube.dataanalysis.ecoengine.test.checks;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis;

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;
package org.gcube.dataanalysis.ecoengine.test.checks;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis;

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.HashMap;
import java.util.List;
@ -57,7 +57,7 @@ public static void main(String[] args) throws Exception {
public void run() {
try {
dg.generate();
dg.compute();
} catch (Exception e) {
}

View File

@ -0,0 +1,84 @@
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
public class TestTransducer {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Transducerer> trans = TransducerersFactory.getTransducerers(testConfigLocal());
trans.get(0).init();
transduce(trans.get(0));
trans = null;
}
private static void transduce(Transducerer trans) throws Exception {
if (trans != null) {
TestTransducer tgs = new TestTransducer();
ThreadCalculator tc = tgs.new ThreadCalculator(trans);
Thread t = new Thread(tc);
t.start();
while (trans.getStatus() < 100) {
System.out.println("STATUS: " + trans.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Transducerer dg;
public ThreadCalculator(Transducerer dg) {
this.dg = dg;
}
public void run() {
try {
dg.compute();
} catch (Exception e) {
}
}
}
private static AlgorithmConfiguration testConfigLocal() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(1);
config.setAgent("BIOCLIMATE_HSPEC");
//config.setParam("HSPEC_TABLE_LIST", "hcaf_d, hcaf_d_2015_LINEAR_01338580273835,hcaf_d_2018_LINEAR_11338580276548,hcaf_d_2021_LINEAR_21338580279237,hcaf_d_2024_LINEAR_31338580282780,hcaf_d_2027_LINEAR_41338580283400,hcaf_d_2030_LINEAR_51338580284030,hcaf_d_2033_LINEAR_61338580284663,hcaf_d_2036_LINEAR_71338580285205,hcaf_d_2039_LINEAR_81338580285958,hcaf_d_2042_LINEAR_91338580286545,hcaf_d_2050");
//config.setParam("HSPEC_TABLE_NAMES", "test,test,test,test,test,test,test,test,test,test,test,test");
config.setParam("HSPEC_TABLE_LIST", "hspec_validation"+AlgorithmConfiguration.getListSeparator()+"hspec_validation2");
config.setParam("HSPEC_TABLE_NAMES", "test"+AlgorithmConfiguration.getListSeparator()+"test");
config.setParam("Threshold", "0.5");
config.setParam("DatabaseUserName", "gcube");
config.setParam("DatabasePassword", "d4science2");
config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver", "org.postgresql.Driver");
return config;
}
}

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.List;
@ -56,7 +56,7 @@ public static void main(String[] args) throws Exception {
public void run() {
try {
dg.generate();
dg.compute();
} catch (Exception e) {
}

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.List;
@ -56,7 +56,7 @@ public static void main(String[] args) throws Exception {
public void run() {
try {
dg.model(config, null);
dg.model(null);
} catch (Exception e) {
}

View File

@ -1,4 +1,4 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.List;
@ -56,7 +56,7 @@ public static void main(String[] args) throws Exception {
public void run() {
try {
dg.generate();
dg.compute();
} catch (Exception e) {
}

View File

@ -0,0 +1,40 @@
package org.gcube.dataanalysis.ecoengine.test.regression;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory;
public class RegressionTestClusterers {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Clusterer> clus = ClusterersFactory.getClusterers(testConfigLocal());
clus.get(0).init();
Regressor.process(clus.get(0));
clus = null;
}
private static AlgorithmConfiguration testConfigLocal() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setNumberOfResources(1);
config.setAgent("DBSCAN");
config.setParam("OccurrencePointsTable","presence_basking_cluster");
config.setParam("FeaturesColumnNames","centerlat,centerlong");
config.setParam("OccurrencePointsClusterTable","occCluster_2");
config.setParam("epsilon","10");
config.setParam("minPoints","1");
return config;
}
}

View File

@ -0,0 +1,69 @@
package org.gcube.dataanalysis.ecoengine.test.regression;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator;
import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory;
public class RegressionTestEvaluators {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
List<Evaluator> evaluators = EvaluatorsFactory.getEvaluators(testConfig1());
evaluators.get(0).init();
Regressor.process(evaluators.get(0));
evaluators = null;
System.out.println("\n**********-------************\n");
//test Discrepancy
evaluators = EvaluatorsFactory.getEvaluators(testConfig2());
evaluators.get(0).init();
Regressor.process(evaluators.get(0));
evaluators = null;
}
private static AlgorithmConfiguration testConfig1() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setNumberOfResources(1);
config.setAgent("DISCREPANCY_ANALYSIS");
config.setParam("FirstTable", "hspec_native_baskingshark_aquamaps");
config.setParam("SecondTable", "hspec_suitable_nn_Fis22747");
config.setParam("FirstTableCsquareColumn", "csquarecode");
config.setParam("SecondTableCsquareColumn", "csquarecode");
config.setParam("FirstTableProbabilityColumn", "probability");
config.setParam("SecondTableProbabilityColumn", "probability");
config.setParam("ComparisonThreshold", "0.1");
return config;
}
private static AlgorithmConfiguration testConfig2() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setNumberOfResources(1);
config.setAgent("QUALITY_ANALYSIS");
config.setParam("PositiveCasesTable", "presence_data_baskingshark");
config.setParam("NegativeCasesTable", "absence_data_baskingshark2");
config.setParam("PositiveCasesTableKeyColumn", "csquarecode");
config.setParam("NegativeCasesTableKeyColumn", "csquarecode");
config.setParam("DistributionTable", "hspec_native_baskingshark_aquamaps");
config.setParam("DistributionTableKeyColumn", "csquarecode");
config.setParam("DistributionTableProbabilityColumn", "probability");
config.setParam("PositiveThreshold", "0.5");
config.setParam("NegativeThreshold", "0.5");
return config;
}
}

View File

@ -0,0 +1,41 @@
package org.gcube.dataanalysis.ecoengine.test.regression;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class RegressionTestGenerators {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigLocal());
generators.get(0).init();
Regressor.process(generators.get(0));
generators = null;
}
private static AlgorithmConfiguration testConfigLocal() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setNumberOfResources(5);
config.setModel("AQUAMAPS_SUITABLE");
config.setParam("DistributionTable","hspec_suitable_test_gp");
config.setParam("CsquarecodesTable","hcaf_d");
config.setParam("EnvelopeTable","hspen_micro");
config.setParam("PreprocessedTable", "maxminlat_hspen");
config.setParam("CreateTable","true");
return config;
}
}

View File

@ -0,0 +1,43 @@
package org.gcube.dataanalysis.ecoengine.test.regression;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
public class RegressionTestModelers {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Modeler> modelers = ModelersFactory.getModelers(testConfigLocal());
modelers.get(0).init();
Regressor.process(modelers.get(0));
modelers = null;
}
private static AlgorithmConfiguration testConfigLocal() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setNumberOfResources(2);
config.setModel("HSPEN");
config.setParam("OuputEnvelopeTable","hspen_trained");
config.setParam("OccurrenceCellsTable","occurrencecells");
config.setParam("EnvelopeTable","hspen_mini");
config.setParam("CsquarecodesTable", "hcaf_d");
config.setParam("CreateTable","true");
return config;
}
}

View File

@ -0,0 +1,39 @@
package org.gcube.dataanalysis.ecoengine.test.regression;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Clusterer;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.processing.factories.ClusterersFactory;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
public class RegressionTestTransducers {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Transducerer> trans = TransducerersFactory.getTransducerers(testConfigLocal());
trans.get(0).init();
Regressor.process(trans.get(0));
trans = null;
}
private static AlgorithmConfiguration testConfigLocal() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("BIOCLIMATE_HSPEC");
config.setParam("HSPEC_TABLE_LIST", "hspec_validation"+AlgorithmConfiguration.getListSeparator()+"hspec_validation2");
config.setParam("HSPEC_TABLE_NAMES", "test"+AlgorithmConfiguration.getListSeparator()+"test");
config.setParam("Threshold", "0.5");
return config;
}
}

View File

@ -0,0 +1,61 @@
package org.gcube.dataanalysis.ecoengine.test.regression;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
public class Regressor {
public static void process(ComputationalAgent agent) throws Exception {
if (agent != null) {
Regressor tgs = new Regressor();
ThreadCalculator tc = tgs.new ThreadCalculator(agent);
Thread t = new Thread(tc);
t.start();
while (agent.getStatus() < 100) {
String resLoad = agent.getResourceLoad();
String ress = agent.getResources();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("STATUS: " + agent.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
ComputationalAgent dg;
public ThreadCalculator(ComputationalAgent dg) {
this.dg = dg;
}
public void run() {
try {
dg.compute();
} catch (Exception e) {
}
}
}
public static AlgorithmConfiguration getConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
return config;
}
}

View File

@ -17,6 +17,7 @@ import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.BioClimateAnalysis;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
public class BioClimateHSPECTransducer implements Transducerer{
@ -37,7 +38,7 @@ public class BioClimateHSPECTransducer implements Transducerer{
@Override
public void init() throws Exception {
//init the analyzer
bioClimate=new BioClimateAnalysis(config.getConfigPath(),"./",config.getParam("DatabaseURL"),config.getParam("DatabaseUserName"), config.getParam("DatabasePassword"), true);
bioClimate=new BioClimateAnalysis(config.getConfigPath(),"./",config.getParam("DatabaseURL"),config.getParam("DatabaseUserName"), config.getParam("DatabasePassword"), false);
//build the hspec names:
hspecTables = config.getParam("HSPEC_TABLE_LIST").split(AlgorithmConfiguration.getListSeparator());
hspecTablesNames = config.getParam("HSPEC_TABLE_NAMES").split(AlgorithmConfiguration.getListSeparator());
@ -108,7 +109,7 @@ public class BioClimateHSPECTransducer implements Transducerer{
}
@Override
public void transform() throws Exception {
public void compute() throws Exception {
status = 0.1f;
try{
@ -124,4 +125,18 @@ public class BioClimateHSPECTransducer implements Transducerer{
}
}
ResourceFactory resourceManager;
public String getResourceLoad() {
if (resourceManager==null)
resourceManager = new ResourceFactory();
return resourceManager.getResourceLoad(1);
}
@Override
public String getResources() {
return ResourceFactory.getResources(100f);
}
}

View File

@ -6,9 +6,12 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.Transducerer;
import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory;
public class TestTrans implements Transducerer{
float status = 0;
@Override
public INFRASTRUCTURE getInfrastructure() {
// TODO Auto-generated method stub
@ -35,14 +38,12 @@ public class TestTrans implements Transducerer{
@Override
public float getStatus() {
// TODO Auto-generated method stub
return 0;
return status;
}
@Override
public String getDescription() {
// TODO Auto-generated method stub
return null;
return "";
}
@Override
@ -58,9 +59,20 @@ public class TestTrans implements Transducerer{
}
@Override
public void transform() throws Exception {
public void compute() throws Exception {
// TODO Auto-generated method stub
}
ResourceFactory resourceManager;
public String getResourceLoad() {
if (resourceManager==null)
resourceManager = new ResourceFactory();
return resourceManager.getResourceLoad(1);
}
@Override
public String getResources() {
return ResourceFactory.getResources(100f);
}
}

View File

@ -17,8 +17,8 @@ public class EvaluatorT implements Runnable{
public void run() {
try {
PrimitiveType output = (PrimitiveType) dg.process(config);
dg.compute();
PrimitiveType output = (PrimitiveType) dg.getOutput();
HashMap<String, String> out = (HashMap<String, String>)output.getContent();
DiscrepancyAnalysis.visualizeResults(out);

View File

@ -16,7 +16,7 @@ public class GeneratorT implements Runnable{
public void run() {
try {
dg.generate();
dg.compute();
} catch (Exception e) {
}

View File

@ -19,7 +19,7 @@ public class ModelerT implements Runnable {
public void run() {
try {
dg.model(config, null);
dg.model(null);
} catch (Exception e) {
}