Gianpaolo Coro 2011-12-15 15:59:04 +00:00
parent f203bbf2bb
commit 198f839078
37 changed files with 746 additions and 1339 deletions

View File

@@ -1,10 +1,10 @@
-AQUAMAPS_SUITABLE=org.gcube.application.aquamaps.ecomodelling.generators.spatialdistributions.AquamapsSuitable
+AQUAMAPS_SUITABLE=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable
-AQUAMAPS_NATIVE=org.gcube.application.aquamaps.ecomodelling.generators.spatialdistributions.AquamapsNative
+AQUAMAPS_NATIVE=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNative
-AQUAMAPS_NATIVE_2050=org.gcube.application.aquamaps.ecomodelling.generators.spatialdistributions.AquamapsNative2050
+AQUAMAPS_NATIVE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNative2050
-AQUAMAPS_SUITABLE_2050=org.gcube.application.aquamaps.ecomodelling.generators.spatialdistributions.AquamapsSuitable2050
+AQUAMAPS_SUITABLE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable2050
-REMOTE_AQUAMAPS_SUITABLE=org.gcube.application.aquamaps.ecomodelling.generators.processing.RainyCloudGenerator
+REMOTE_AQUAMAPS_SUITABLE=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator
-REMOTE_AQUAMAPS_NATIVE=org.gcube.application.aquamaps.ecomodelling.generators.processing.RainyCloudGenerator
+REMOTE_AQUAMAPS_NATIVE=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator
-REMOTE_AQUAMAPS_NATIVE_2050=org.gcube.application.aquamaps.ecomodelling.generators.processing.RainyCloudGenerator
+REMOTE_AQUAMAPS_NATIVE_2050=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator
-REMOTE_AQUAMAPS_SUITABLE_2050=org.gcube.application.aquamaps.ecomodelling.generators.processing.RainyCloudGenerator
+REMOTE_AQUAMAPS_SUITABLE_2050=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator
-DUMMY=org.gcube.application.aquamaps.ecomodelling.generators.spatialdistributions.DummyAlgorithm
+DUMMY=org.gcube.dataanalysis.ecoengine.spatialdistributions.DummyAlgorithm
-TEST=org.gcube.application.aquamaps.ecomodelling.generators.spatialdistributions.TestAlgorithm
+TEST=org.gcube.dataanalysis.ecoengine.spatialdistributions.TestAlgorithm

View File

@@ -1,3 +1,3 @@
-LOCAL_WITH_DATABASE=org.gcube.application.aquamaps.ecomodelling.generators.processing.LocalSplitGenerator
+LOCAL_WITH_DATABASE=org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator
-SIMPLE_LOCAL=org.gcube.application.aquamaps.ecomodelling.generators.processing.LocalSimpleSplitGenerator
+SIMPLE_LOCAL=org.gcube.dataanalysis.ecoengine.processing.LocalSimpleSplitGenerator
-REMOTE_RAINYCLOUD=org.gcube.application.aquamaps.ecomodelling.generators.processing.RainyCloudGenerator
+REMOTE_RAINYCLOUD=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator

View File

@@ -1,2 +1,2 @@
-HSPEN=org.gcube.application.aquamaps.ecomodelling.generators.models.ModelHSPEN
+HSPEN=org.gcube.dataanalysis.ecoengine.models.ModelHSPEN
-HSPEN_MODELER=org.gcube.application.aquamaps.ecomodelling.generators.modeling.SimpleModeler
+HSPEN_MODELER=org.gcube.dataanalysis.ecoengine.modeling.SimpleModeler

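Together these three registries decouple public names from implementation classes: the factories in this commit load the .properties file, look the name up, and instantiate the value by reflection, so moving a class between packages (as this commit does) only requires editing the right-hand side. A minimal sketch of that resolution, assuming the "./cfg/" config path used elsewhere in this commit (error handling omitted; the example class name is illustrative):

import java.util.Properties;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;

public class RegistryLookupExample {
    public static void main(String[] args) throws Exception {
        // resolve the public name AQUAMAPS_SUITABLE to its implementing class
        Properties p = AlgorithmConfiguration.getProperties("./cfg/" + AlgorithmConfiguration.algorithmsFile);
        String className = p.getProperty("AQUAMAPS_SUITABLE");
        // reflective instantiation; the registered classes have no-argument constructors
        Object algorithm = Class.forName(className).newInstance();
        System.out.println(algorithm.getClass().getName());
    }
}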
View File

@@ -0,0 +1,144 @@
package org.gcube.dataanalysis.ecoengine.configuration;
import java.io.FileInputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
public class AlgorithmConfiguration extends LexicalEngineConfiguration{
public static Properties getProperties(String absoluteFilePath) {
Properties props = new Properties();
FileInputStream fis = null;
try {
fis = new FileInputStream(absoluteFilePath);
props.load(fis);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
fis.close();
} catch (Exception e) {
}
}
return props;
}
// constants
public static String defaultConnectionFile = "DestinationDBHibernate.cfg.xml";
public static String defaultLoggerFile = "ALog.properties";
public static String algorithmsFile = "algorithms.properties";
public static String generatorsFile = "generators.properties";
public static String modelsFile = "models.properties";
public static int chunkSize = 100000;
public static int refreshResourcesTime = 10;
// database parameters
private String databaseDriver = "org.postgresql.Driver";
private String databaseURL = null;
private String databaseUserName = null;
private String databasePassword = null;
private String databaseDialect = null;
private String databaseIdleConnectionTestPeriod = null;
private String databaseAutomaticTestTable = null;
// Algorithm Parameters
private String configPath;
private String cachePath;
private String persistencePath;
private String distributionTable;
private Boolean createTable = false;
private Boolean useDB = true;
private String envelopeTable;
private String csquarecodesTable;
private String occurrenceCellsTable;
private List<String> featuresTable;
private List<String> preprocessedTables;
//service and remote
private String remoteCalculatorEndpoint;
private String serviceUserName;
private String remoteEnvironment;
private Integer numberOfResources;
//modeling
private String model;
private String generator;
//other properties
private HashMap<String, String> generalProperties;
public String getParam(String key){
if (generalProperties != null)
return generalProperties.get(key);
else return null;
}
public void setParam(String key,String value){
if (generalProperties == null)
generalProperties = new HashMap<String, String>();
generalProperties.put(key,value);
}
public void setConfigPath(String configPath) {
if (!configPath.endsWith("/"))
configPath+="/";
this.configPath = configPath;
}
public String getConfigPath() {
return configPath;
}
public void setNumberOfResources(Integer numberOfThreads) {
this.numberOfResources = numberOfThreads;
}
public Integer getNumberOfResources() {
return numberOfResources;
}
public void addGeneralProperties(HashMap<String, String> generalProperties) {
for (String key:generalProperties.keySet()) {
this.generalProperties.put(key,generalProperties.get(key));
}
}
public void setGeneralProperties(HashMap<String, String> generalProperties) {
this.generalProperties = generalProperties;
}
public HashMap<String, String> getGeneralProperties() {
return generalProperties;
}
public String getModel() {
return model;
}
public void setModel(String model) {
this.model = model;
}
public String getPersistencePath() {
return persistencePath;
}
public void setPersistencePath(String persistencePath) {
this.persistencePath = persistencePath;
}
public String getGenerator() {
return generator;
}
public void setGenerator(String generator) {
this.generator = generator;
}
}

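AlgorithmConfiguration keeps the config-path, model, generator and resource fields of the old GenericConfiguration but drops its many per-table and per-database getters and setters in favour of the generalProperties map behind getParam/setParam, so a new algorithm parameter no longer requires new configuration code. A sketch of how a caller is now set up, reusing the values from the main1 method deleted from GeneratorsFactory later in this commit (the password is replaced by a placeholder); note that getParam returns null for keys that were never set:

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;

public class ConfigurationExample {
    public static void main(String[] args) {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setConfigPath("./cfg/"); // a trailing "/" is appended automatically if missing
        config.setModel("AQUAMAPS_SUITABLE");
        config.setGenerator("LOCAL_WITH_DATABASE");
        config.setNumberOfResources(20);
        // table names and credentials now travel as key-value pairs instead of dedicated setters
        config.setParam("EnvelopeTable", "hspen_validation");
        config.setParam("CsquarecodesTable", "hcaf_d");
        config.setParam("DistributionTable", "hspec_suitable_remote_test");
        config.setParam("CreateTable", "true");
        config.setParam("DatabaseUserName", "gcube");
        config.setParam("DatabasePassword", "secret"); // placeholder
        config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
        System.out.println(config.getParam("DistributionTable"));
    }
}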
View File

@@ -1,278 +0,0 @@
package org.gcube.dataanalysis.ecoengine.configuration;
import java.io.FileInputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
public class GenericConfiguration extends LexicalEngineConfiguration{
public static Properties getProperties(String absoluteFilePath) {
Properties props = new Properties();
FileInputStream fis = null;
try {
fis = new FileInputStream(absoluteFilePath);
props.load(fis);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
fis.close();
} catch (Exception e) {
}
}
return props;
}
// constants
public static String defaultConnectionFile = "DestinationDBHibernate.cfg.xml";
public static String defaultLoggerFile = "ALog.properties";
public static String algorithmsFile = "algorithms.properties";
public static String generatorsFile = "generators.properties";
public static String modelsFile = "models.properties";
public static int chunkSize = 100000;
public static int refreshResourcesTime = 10;
// database parameters
private String databaseDriver = "org.postgresql.Driver";
private String databaseURL = null;
private String databaseUserName = null;
private String databasePassword = null;
private String databaseDialect = null;
private String databaseIdleConnectionTestPeriod = null;
private String databaseAutomaticTestTable = null;
// Algorithm Parameters
private String configPath;
private String cachePath;
private String persistencePath;
private String distributionTable;
private Boolean createTable = false;
private Boolean useDB = true;
private String envelopeTable;
private String csquarecodesTable;
private String occurrenceCellsTable;
private List<String> featuresTable;
private List<String> preprocessedTables;
//service and remote
private String remoteCalculatorEndpoint;
private String serviceUserName;
private String remoteEnvironment;
private Integer numberOfResources;
//modeling
private String model;
private String generator;
//other properties
private HashMap<String, String> generalProperties;
public void setDatabaseDriver(String databaseDriver) {
this.databaseDriver = databaseDriver;
}
public String getDatabaseDriver() {
return databaseDriver;
}
public void setDatabaseURL(String databaseURL) {
this.databaseURL = databaseURL;
}
public String getDatabaseURL() {
return databaseURL;
}
public void setDatabaseUserName(String databaseUserName) {
this.databaseUserName = databaseUserName;
}
public String getDatabaseUserName() {
return databaseUserName;
}
public void setDatabasePassword(String databasePassword) {
this.databasePassword = databasePassword;
}
public String getDatabasePassword() {
return databasePassword;
}
public void setDatabaseDialect(String databaseDialect) {
this.databaseDialect = databaseDialect;
}
public String getDatabaseDialect() {
return databaseDialect;
}
public void setDatabaseIdleConnectionTestPeriod(String databaseIdleConnectionTestPeriod) {
this.databaseIdleConnectionTestPeriod = databaseIdleConnectionTestPeriod;
}
public String getDatabaseIdleConnectionTestPeriod() {
return databaseIdleConnectionTestPeriod;
}
public void setDatabaseAutomaticTestTable(String databaseAutomaticTestTable) {
this.databaseAutomaticTestTable = databaseAutomaticTestTable;
}
public String getDatabaseAutomaticTestTable() {
return databaseAutomaticTestTable;
}
public void setConfigPath(String configPath) {
if (!configPath.endsWith("/"))
configPath+="/";
this.configPath = configPath;
}
public String getConfigPath() {
return configPath;
}
public void setDistributionTable(String distributionTable) {
this.distributionTable = distributionTable;
}
public String getDistributionTable() {
return distributionTable;
}
public void setCreateTable(Boolean createTable) {
this.createTable = createTable;
}
public Boolean createTable() {
return createTable;
}
public void setNumberOfResources(Integer numberOfThreads) {
this.numberOfResources = numberOfThreads;
}
public Integer getNumberOfResources() {
return numberOfResources;
}
public void setUseDB(Boolean writeOnDB) {
this.useDB = writeOnDB;
}
public Boolean useDB() {
return useDB;
}
public void setRemoteCalculator(String remoteCalculator) {
this.remoteCalculatorEndpoint = remoteCalculator;
}
public String getRemoteCalculator() {
return remoteCalculatorEndpoint;
}
public void setServiceUserName(String serviceUserName) {
this.serviceUserName = serviceUserName;
}
public String getServiceUserName() {
return serviceUserName;
}
public void setCachePath(String cachePath) {
this.cachePath = cachePath;
}
public String getCachePath() {
return cachePath;
}
public void setGeneralProperties(HashMap<String, String> generalProperties) {
this.generalProperties = generalProperties;
}
public HashMap<String, String> getGeneralProperties() {
return generalProperties;
}
public void setRemoteEnvironment(String remoteEnvironment) {
this.remoteEnvironment = remoteEnvironment;
}
public String getRemoteEnvironment() {
return remoteEnvironment;
}
public String getModel() {
return model;
}
public void setModel(String model) {
this.model = model;
}
public String getEnvelopeTable() {
return envelopeTable;
}
public void setEnvelopeTable(String envelopeTable) {
this.envelopeTable = envelopeTable;
}
public String getCsquarecodesTable() {
return csquarecodesTable;
}
public void setCsquarecodesTable(String csquarecodesTable) {
this.csquarecodesTable = csquarecodesTable;
}
public List<String> getFeaturesTable() {
return featuresTable;
}
public void setFeaturesTable(List<String> featuresTable) {
this.featuresTable = featuresTable;
}
public List<String> getPreprocessedTables() {
return preprocessedTables;
}
public void setPreprocessedTables(List<String> preprocessedTables) {
this.preprocessedTables = preprocessedTables;
}
public String getOccurrenceCellsTable() {
return occurrenceCellsTable;
}
public void setOccurrenceCellsTable(String occurrenceCellsTable) {
this.occurrenceCellsTable = occurrenceCellsTable;
}
public String getPersistencePath() {
return persistencePath;
}
public void setPersistencePath(String persistencePath) {
this.persistencePath = persistencePath;
}
public String getGenerator() {
return generator;
}
public void setGenerator(String generator) {
this.generator = generator;
}
}

View File

@@ -1,13 +1,18 @@
 package org.gcube.dataanalysis.ecoengine.interfaces;
+import java.util.HashMap;
 import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.configuration.GENERATOR_WEIGHT;
-import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
 public interface Generator {
 public ALG_PROPS[] getSupportedAlgorithms();
+//set the input parameters for this generator
+public HashMap<String, String> getInputParameters();
 //gets the weight of the generator: according to this the generator will be placed in the execution order
 public GENERATOR_WEIGHT getWeight();
@@ -15,7 +20,7 @@ public interface Generator {
 public void init();
-public void setConfiguration(GenericConfiguration config);
+public void setConfiguration(AlgorithmConfiguration config);
 public void shutdown();

View File

@@ -1,8 +1,11 @@
 package org.gcube.dataanalysis.ecoengine.interfaces;
+import java.util.HashMap;
 import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
 public interface SpatialProbabilityDistribution {
 //defines the properties of this algorithm
 public ALG_PROPS[] getProperties();
@@ -11,4 +14,8 @@ public interface SpatialProbabilityDistribution {
 //gets the description of the algorithm
 public String getDescription();
+//set the input parameters for this generator
+public HashMap<String, String> getInputParameters();
 }

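With getInputParameters() on both Generator and SpatialProbabilityDistribution, every algorithm can now advertise the configuration keys it expects before it is run. A method-level sketch of what an implementation might return, modeled on the concrete override RainyCloudGenerator gains later in this commit; the keys shown are the ones read via getParam elsewhere in this commit, and an actual algorithm would list exactly the keys its own init() consumes:

// inside an algorithm implementation: one entry per key later read via config.getParam(...)
@Override
public HashMap<String, String> getInputParameters() {
    HashMap<String, String> inputs = new HashMap<String, String>();
    inputs.put("EnvelopeTable", "");
    inputs.put("CsquarecodesTable", "");
    inputs.put("DistributionTable", "");
    return inputs;
}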
View File

@@ -3,14 +3,13 @@ package org.gcube.dataanalysis.ecoengine.interfaces;
 import java.util.List;
 import java.util.Map;
-import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
-import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 //implements a SpatialProbabilityDistribution where data are taken from a Database
 public interface SpatialProbabilityDistributionGeneric extends SpatialProbabilityDistribution{
 //initialization of the distribution model
-public void init(GenericConfiguration config);
+public void init(AlgorithmConfiguration config);
 public String getMainInfoType();

View File

@@ -3,7 +3,7 @@ package org.gcube.dataanalysis.ecoengine.interfaces;
 import java.util.Queue;
 import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
-import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.hibernate.SessionFactory;
 //implements a SpatialProbabilityDistribution where data are taken from a Database
@@ -13,7 +13,7 @@ public interface SpatialProbabilityDistributionTable extends SpatialProbabilityD
 public ALG_PROPS[] getProperties();
 //initialization of the distribution model
-public void init(GenericConfiguration config,SessionFactory dbHibConnection);
+public void init(AlgorithmConfiguration config,SessionFactory dbHibConnection);
 //get the way principal info will be queried
 public String getMainInfoQuery();

View File

@@ -9,7 +9,7 @@ import java.util.concurrent.Executors;
 import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
-import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
 import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
 import org.gcube.dataanalysis.ecoengine.interfaces.Model;
@@ -52,7 +52,7 @@ public class ModelHSPEN implements Model {
 HashMap<String, List<Object>> allSpeciesHspen;
 private int lastProcessedRecordsNumber;
 private long lastTime;
-GenericConfiguration outconfig;
+AlgorithmConfiguration outconfig;
 @Override
 public float getVersion() {
@@ -67,11 +67,11 @@ public class ModelHSPEN implements Model {
 @Override
 public void init(Object Input, Model previousModel, Object Destination) {
-GenericConfiguration config = (GenericConfiguration) Input;
-outconfig = (GenericConfiguration) Destination;
+AlgorithmConfiguration config = (AlgorithmConfiguration) Input;
+outconfig = (AlgorithmConfiguration) Destination;
 defaultDatabaseFile = config.getConfigPath() + defaultDatabaseFile;
-AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
+AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
 try {
 connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, config);
 } catch (Exception e) {
@@ -80,14 +80,14 @@ public class ModelHSPEN implements Model {
 }
 // initialize queries
-dynamicAlterQuery = alterQuery.replace("%HSPEN%", outconfig.getEnvelopeTable());
-dynamicDropTable = dropHspenTable.replace("%HSPEN%", outconfig.getEnvelopeTable());
-dynamicCreateTable = createHspenTable.replace("%HSPEN%", outconfig.getEnvelopeTable());
-dynamicPopulateNewHspen = populateNewHspen.replace("%HSPEN_ORIGIN%", config.getEnvelopeTable()).replace("%HSPEN%", outconfig.getEnvelopeTable());
-dynamicSpeciesListQuery = speciesListQuery.replace("%HSPEN%", config.getEnvelopeTable());
-dynamicHspenInformationQuery = hspenListQuery.replace("%HSPEN%", config.getEnvelopeTable());
-currentHCAFTable = config.getCsquarecodesTable();
-currentOccurrenceTable = config.getOccurrenceCellsTable();
+dynamicAlterQuery = alterQuery.replace("%HSPEN%", outconfig.getParam("EnvelopeTable"));
+dynamicDropTable = dropHspenTable.replace("%HSPEN%", outconfig.getParam("EnvelopeTable"));
+dynamicCreateTable = createHspenTable.replace("%HSPEN%", outconfig.getParam("EnvelopeTable"));
+dynamicPopulateNewHspen = populateNewHspen.replace("%HSPEN_ORIGIN%", config.getParam("EnvelopeTable")).replace("%HSPEN%", outconfig.getParam("EnvelopeTable"));
+dynamicSpeciesListQuery = speciesListQuery.replace("%HSPEN%", config.getParam("EnvelopeTable"));
+dynamicHspenInformationQuery = hspenListQuery.replace("%HSPEN%", config.getParam("EnvelopeTable"));
+currentHCAFTable = config.getParam("CsquarecodesTable");
+currentOccurrenceTable = config.getParam("OccurrenceCellsTable");
 // Threads
 numberOfthreads = config.getNumberOfResources();
@@ -151,9 +151,9 @@ public class ModelHSPEN implements Model {
 }
 private void generateTable(Object Input) throws Exception {
-GenericConfiguration config = (GenericConfiguration) Input;
+AlgorithmConfiguration config = (AlgorithmConfiguration) Input;
 // create and populate the novel table
-if (config.createTable()){
+if (config.getParam("CreateTable").equalsIgnoreCase("true")){
 AnalysisLogger.getLogger().trace("Distribution Generator->recreating new table " + dynamicCreateTable);
 try{
 DatabaseFactory.executeSQLUpdate(String.format(dynamicDropTable, config.getDatabaseUserName()), connection);
@@ -326,7 +326,7 @@ public class ModelHSPEN implements Model {
 @Override
 public String getContentType() {
-return GenericConfiguration.class.getName();
+return AlgorithmConfiguration.class.getName();
 }
 @Override
@@ -397,12 +397,12 @@ public class ModelHSPEN implements Model {
 @Override
 public String getInputType() {
-return GenericConfiguration.class.getName();
+return AlgorithmConfiguration.class.getName();
 }
 @Override
 public String getOutputType() {
-return GenericConfiguration.class.getName();
+return AlgorithmConfiguration.class.getName();
 }
 }

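The mechanical part of the change is visible above: every typed getter (getEnvelopeTable(), getCsquarecodesTable(), ...) becomes a getParam lookup with a string key. One behavioural difference is worth noting: config.getParam("CreateTable").equalsIgnoreCase("true") throws a NullPointerException when the key was never set, whereas the old Boolean field defaulted to false; LocalSplitGenerator below guards the same lookup with a null check. A null-safe fragment equivalent to the guarded form (a sketch, not code from this commit):

// false both when CreateTable is "false" and when the key is absent
boolean createTable = "true".equalsIgnoreCase(config.getParam("CreateTable"));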
View File

@@ -1,5 +1,6 @@
 package org.gcube.dataanalysis.ecoengine.processing;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -12,8 +13,8 @@ import java.util.concurrent.Executors;
 import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.configuration.GENERATOR_WEIGHT;
-import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
 import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
 import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
 import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
@@ -21,7 +22,7 @@ import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributio
 public class LocalSimpleSplitGenerator implements Generator {
-private GenericConfiguration config;
+private AlgorithmConfiguration config;
 private ExecutorService executorService;
 private int numberOfThreadsToUse;
 private boolean threadActivity[];
@@ -39,11 +40,6 @@ public class LocalSimpleSplitGenerator implements Generator {
 //species Objects -> (geographical Object , Probability)
 ConcurrentHashMap<Object,Map<Object,Float>> completeDistribution;
-public LocalSimpleSplitGenerator(GenericConfiguration config) {
-setConfiguration(config);
-init();
-}
 public LocalSimpleSplitGenerator() {
 }
@@ -93,7 +89,7 @@ public class LocalSimpleSplitGenerator implements Generator {
 @Override
 public void init() {
-AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
+AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
 stopInterrupt = false;
 completeDistribution = new ConcurrentHashMap<Object, Map<Object,Float>>();
 try {
@@ -108,14 +104,14 @@ public class LocalSimpleSplitGenerator implements Generator {
 }
 private void initModel() throws Exception {
-Properties p = GenericConfiguration.getProperties(config.getConfigPath() + GenericConfiguration.algorithmsFile);
+Properties p = AlgorithmConfiguration.getProperties(config.getConfigPath() + AlgorithmConfiguration.algorithmsFile);
 String objectclass = p.getProperty(config.getModel());
 distributionModel = (SpatialProbabilityDistributionGeneric) Class.forName(objectclass).newInstance();
 distributionModel.init(config);
 }
 @Override
-public void setConfiguration(GenericConfiguration config) {
+public void setConfiguration(AlgorithmConfiguration config) {
 this.config = config;
 if (config.getNumberOfResources() == 0)
 this.numberOfThreadsToUse = 1;
@@ -334,6 +330,11 @@ public class LocalSimpleSplitGenerator implements Generator {
 return GENERATOR_WEIGHT.LOWEST;
 }
+@Override
+public HashMap<String, String> getInputParameters() {
+return distributionModel.getInputParameters();
+}
 }

View File

@@ -1,5 +1,6 @@
 package org.gcube.dataanalysis.ecoengine.processing;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Properties;
 import java.util.Queue;
@@ -14,8 +15,8 @@ import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
 import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.configuration.GENERATOR_WEIGHT;
-import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
 import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
 import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
 import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
@@ -25,7 +26,7 @@ import org.hibernate.SessionFactory;
 public class LocalSplitGenerator implements Generator {
-private GenericConfiguration config;
+private AlgorithmConfiguration config;
 private ExecutorService executorService;
 private int numberOfThreadsToUse;
 private boolean threadActivity[];
@@ -45,7 +46,7 @@ public class LocalSplitGenerator implements Generator {
 String probabilityInsertionStatement = "insert into %1$s (speciesid,csquarecode,probability %ADDEDINFORMATION%) VALUES %2$s";
-public LocalSplitGenerator(GenericConfiguration config) {
+public LocalSplitGenerator(AlgorithmConfiguration config) {
 setConfiguration(config);
 init();
 }
@@ -99,7 +100,7 @@ public class LocalSplitGenerator implements Generator {
 @Override
 public void init() {
-AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
+AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
 stopInterrupt = false;
 flushInterrupt = false;
 initDBSession();
@@ -124,19 +125,19 @@ public class LocalSplitGenerator implements Generator {
 AnalysisLogger.getLogger().trace("init()->insertion scheduler initialized");
 // inizialize the scheduler for the insertions
 Timer writerScheduler = new Timer();
-writerScheduler.schedule(new DatabaseWriter(), 0, GenericConfiguration.refreshResourcesTime);
+writerScheduler.schedule(new DatabaseWriter(), 0, AlgorithmConfiguration.refreshResourcesTime);
 }
 }
 private void initModel() throws Exception {
-Properties p = GenericConfiguration.getProperties(config.getConfigPath() + GenericConfiguration.algorithmsFile);
+Properties p = AlgorithmConfiguration.getProperties(config.getConfigPath() + AlgorithmConfiguration.algorithmsFile);
 String objectclass = p.getProperty(config.getModel());
 distributionModel = (SpatialProbabilityDistributionTable) Class.forName(objectclass).newInstance();
 distributionModel.init(config, dbHibConnection);
 }
 @Override
-public void setConfiguration(GenericConfiguration config) {
+public void setConfiguration(AlgorithmConfiguration config) {
 this.config = config;
 if (config.getNumberOfResources() == 0)
 this.numberOfThreadsToUse = 1;
@@ -158,7 +159,13 @@ public class LocalSplitGenerator implements Generator {
 try {
 if ((config != null) && (config.getConfigPath() != null)) {
-String defaultDatabaseFile = config.getConfigPath() + GenericConfiguration.defaultConnectionFile;
+String defaultDatabaseFile = config.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile;
+config.setDatabaseDriver(config.getParam("DatabaseDriver"));
+config.setDatabaseUserName(config.getParam("DatabaseUserName"));
+config.setDatabasePassword(config.getParam("DatabasePassword"));
+config.setDatabaseURL(config.getParam("DatabaseURL"));
 dbHibConnection = DatabaseFactory.initDBConnection(defaultDatabaseFile, config);
 }
 } catch (Exception e) {
@@ -169,13 +176,15 @@ public class LocalSplitGenerator implements Generator {
 }
 private void createTable() throws Exception {
-if (config.createTable()) {
+if (config.getParam("CreateTable")!=null && config.getParam("CreateTable").equalsIgnoreCase("true")) {
 try {
-DatabaseFactory.executeSQLUpdate("drop table " + config.getDistributionTable(), dbHibConnection);
+AnalysisLogger.getLogger().trace("recreating table: "+"drop table " + config.getParam("DistributionTable"));
+DatabaseFactory.executeSQLUpdate("drop table " + config.getParam("DistributionTable"), dbHibConnection);
+AnalysisLogger.getLogger().trace("recreating table->OK");
 } catch (Exception e) {
+AnalysisLogger.getLogger().trace("recreating table->"+e.getLocalizedMessage());
 }
-DatabaseFactory.executeUpdateNoTransaction(distributionModel.getDistributionTableStatement(), config.getDatabaseDriver(), config.getDatabaseUserName(), config.getDatabasePassword(), config.getDatabaseURL(), true);
+DatabaseFactory.executeUpdateNoTransaction(distributionModel.getDistributionTableStatement(), config.getParam("DatabaseDriver"), config.getParam("DatabaseUserName"), config.getParam("DatabasePassword"), config.getParam("DatabaseURL"), true);
 AnalysisLogger.getLogger().trace("createTable()->OK!");
 }
@@ -322,10 +331,12 @@ public class LocalSplitGenerator implements Generator {
 AnalysisLogger.getLogger().error(e);
 throw e;
 } finally {
+try{
 // REPORT OVERALL ELAPSED TIME
 distributionModel.postProcess();
 // shutdown all
 shutdown();
+}catch(Exception e){}
 long tend = System.currentTimeMillis();
 long ttotal = tend - tstart;
 AnalysisLogger.getLogger().warn("generate->Distribution Generator->Algorithm finished in: " + ((double) ttotal / (double) 60000) + " min\n");
@@ -398,9 +409,9 @@ public class LocalSplitGenerator implements Generator {
 AnalysisLogger.getLogger().trace("\t...finished flushing on db");
 flushInterrupt = true;
 this.cancel();
-} else if ((probabilityBuffer != null) && (probabilityBuffer.size() > GenericConfiguration.chunkSize)) {
+} else if ((probabilityBuffer != null) && (probabilityBuffer.size() > AlgorithmConfiguration.chunkSize)) {
 // AnalysisLogger.getLogger().trace("\t...writing on db");
-writeOnDB(GenericConfiguration.chunkSize);
+writeOnDB(AlgorithmConfiguration.chunkSize);
 // AnalysisLogger.getLogger().trace("\t...finished writing on db");
 }
 } catch (Exception e) {
@@ -414,8 +425,8 @@ public class LocalSplitGenerator implements Generator {
 public void flushBuffer() {
 if ((probabilityBuffer != null) && (probabilityBuffer.size() > 0)) {
-while (probabilityBuffer.size() > GenericConfiguration.chunkSize)
-writeOnDB(GenericConfiguration.chunkSize);
+while (probabilityBuffer.size() > AlgorithmConfiguration.chunkSize)
+writeOnDB(AlgorithmConfiguration.chunkSize);
 writeOnDB(probabilityBuffer.size());
 }
@@ -434,7 +445,7 @@ public class LocalSplitGenerator implements Generator {
 }
 }
-String insertionString = String.format(probabilityInsertionStatement, config.getDistributionTable(), sb.toString());
+String insertionString = String.format(probabilityInsertionStatement, config.getParam("DistributionTable"), sb.toString());
 try {
 // AnalysisLogger.getLogger().debug("->"+insertionString);
@@ -462,4 +473,9 @@ public class LocalSplitGenerator implements Generator {
 return GENERATOR_WEIGHT.LOWEST;
 }
+@Override
+public HashMap<String, String> getInputParameters() {
+return distributionModel.getInputParameters();
+}
 }

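Besides the configuration swap, this file now copies the database parameters from getParam into the Hibernate setters before initDBConnection, and logs the guarded table drop. The write path is unchanged: buffered probability rows are drained to the database in chunks of AlgorithmConfiguration.chunkSize (100000). A self-contained restatement of that drain pattern, assuming a plain string queue in place of the real probability buffer (the class and method names are illustrative):

import java.util.LinkedList;
import java.util.Queue;

public class ChunkedDrainExample {
    static final int CHUNK_SIZE = 100000; // mirrors AlgorithmConfiguration.chunkSize

    // stand-in for LocalSplitGenerator.writeOnDB(n): consume n buffered rows
    static void writeOnDB(Queue<String> buffer, int n) {
        for (int i = 0; i < n; i++)
            buffer.poll();
    }

    // mirrors flushBuffer(): write full chunks first, then the remainder in one shot
    static void flushBuffer(Queue<String> buffer) {
        if (buffer != null && buffer.size() > 0) {
            while (buffer.size() > CHUNK_SIZE)
                writeOnDB(buffer, CHUNK_SIZE);
            writeOnDB(buffer, buffer.size());
        }
    }

    public static void main(String[] args) {
        Queue<String> buffer = new LinkedList<String>();
        for (int i = 0; i < 250000; i++)
            buffer.add("row" + i);
        flushBuffer(buffer);
        System.out.println("remaining: " + buffer.size()); // 0
    }
}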
View File

@@ -1,10 +1,12 @@
 package org.gcube.dataanalysis.ecoengine.processing;
+import java.util.HashMap;
 import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.configuration.GENERATOR_WEIGHT;
-import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
 import org.gcube.dataanalysis.ecoengine.connectors.RemoteGenerationManager;
 import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecInputObject;
 import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecOutputObject;
@@ -12,85 +14,81 @@ import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
 import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
 import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
-public class RainyCloudGenerator implements Generator{
-GenericConfiguration config;
+public class RainyCloudGenerator implements Generator {
+AlgorithmConfiguration config;
 private boolean interruptProcessing;
 RemoteGenerationManager remoteGenerationManager;
 RemoteHspecInputObject rhio;
-public RainyCloudGenerator(GenericConfiguration config) {
+public RainyCloudGenerator(AlgorithmConfiguration config) {
 setConfiguration(config);
 init();
 }
 public RainyCloudGenerator() {
 }
 @Override
 public float getStatus() {
 RemoteHspecOutputObject oo = remoteGenerationManager.retrieveCompleteStatus();
 // if (oo.status.equals("DONE")||oo.status.equals("ERROR"))
-if (oo.status.equals("DONE"))
-{
+if (oo.status.equals("DONE")) {
 stopProcess();
 return 100f;
-}
-else {
-float remoteStatus =(float)remoteGenerationManager.retrieveCompletion();
-return (remoteStatus==100)?99:remoteStatus;
+} else {
+float remoteStatus = (float) remoteGenerationManager.retrieveCompletion();
+return (remoteStatus == 100) ? 99 : remoteStatus;
 }
 }
 @Override
 public void init() {
-AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
+AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
 interruptProcessing = false;
 rhio = new RemoteHspecInputObject();
-rhio.userName = config.getServiceUserName();
-rhio.environment = config.getRemoteEnvironment();
+rhio.userName = config.getParam("ServiceUserName");
+rhio.environment = config.getParam("RemoteEnvironment");
 rhio.configuration = config.getGeneralProperties();
 rhio.generativeModel = config.getModel();
-String jdbcUrl = config.getDatabaseURL();
-String userName = config.getDatabaseUserName();
-String password = config.getDatabasePassword();
-jdbcUrl += ";username="+userName+";password="+password;
-//jdbc:sqlserver://localhost;user=MyUserName;password=*****;
-rhio.hcafTableName.tableName = config.getCsquarecodesTable();
-rhio.hcafTableName.jdbcUrl=jdbcUrl;
-rhio.hspecDestinationTableName.tableName = config.getDistributionTable();
-rhio.hspecDestinationTableName.jdbcUrl=jdbcUrl;
-rhio.hspenTableName.tableName = config.getEnvelopeTable();
-rhio.hspenTableName.jdbcUrl=jdbcUrl;
-rhio.occurrenceCellsTable.tableName = "maxminlat_"+config.getEnvelopeTable();
-rhio.occurrenceCellsTable.jdbcUrl=jdbcUrl;
+String jdbcUrl = config.getParam("DatabaseURL");
+String userName = config.getParam("DatabaseUserName");
+String password = config.getParam("DatabasePassword");
+jdbcUrl += ";username=" + userName + ";password=" + password;
+// jdbc:sqlserver://localhost;user=MyUserName;password=*****;
+rhio.hcafTableName.tableName = config.getParam("CsquarecodesTable");
+rhio.hcafTableName.jdbcUrl = jdbcUrl;
+rhio.hspecDestinationTableName.tableName = config.getParam("DistributionTable");
+rhio.hspecDestinationTableName.jdbcUrl = jdbcUrl;
+rhio.hspenTableName.tableName = config.getParam("EnvelopeTable");
+rhio.hspenTableName.jdbcUrl = jdbcUrl;
+rhio.occurrenceCellsTable.tableName = "maxminlat_" + config.getParam("EnvelopeTable");
+rhio.occurrenceCellsTable.jdbcUrl = jdbcUrl;
 rhio.nWorkers = config.getNumberOfResources();
 if (config.getModel().contains("2050"))
 rhio.is2050 = true;
 else
 rhio.is2050 = false;
 if (config.getModel().contains("NATIVE"))
 rhio.isNativeGeneration = true;
 else
 rhio.isNativeGeneration = false;
-//create and call the remote generator
-remoteGenerationManager = new RemoteGenerationManager(config.getRemoteCalculator());
+// create and call the remote generator
+remoteGenerationManager = new RemoteGenerationManager(config.getParam("RemoteCalculator"));
 }
 @Override
-public void setConfiguration(GenericConfiguration config) {
+public void setConfiguration(AlgorithmConfiguration config) {
 this.config = config;
 }
@@ -106,29 +104,30 @@ public class RainyCloudGenerator implements Generator{
 @Override
 public String getResourceLoad() {
 String returnString = "[]";
-try{
+try {
 RemoteHspecOutputObject rhoo = remoteGenerationManager.retrieveCompleteStatus();
-if (rhoo.metrics.throughput.size()>1)
-{
-ResourceLoad rs = new ResourceLoad(rhoo.metrics.throughput.get(0),rhoo.metrics.throughput.get(1));
+if (rhoo.metrics.throughput.size() > 1) {
+ResourceLoad rs = new ResourceLoad(rhoo.metrics.throughput.get(0), rhoo.metrics.throughput.get(1));
 returnString = rs.toString();
 }
-}catch(Exception e){}
+} catch (Exception e) {
+}
 return returnString;
 }
 @Override
 public String getResources() {
 Resources res = new Resources();
-try{
+try {
 RemoteHspecOutputObject rhoo = remoteGenerationManager.retrieveCompleteStatus();
 res.list = rhoo.metrics.load;
-}catch(Exception e){}
-if ((res!=null) && (res.list!=null))
+} catch (Exception e) {
+}
+if ((res != null) && (res.list != null))
 return HttpRequest.toJSon(res.list).replace("resId", "resID");
 else
 return "[]";
 }
@@ -136,9 +135,8 @@ public class RainyCloudGenerator implements Generator{
 public String getLoad() {
 RemoteHspecOutputObject rhoo = remoteGenerationManager.retrieveCompleteStatus();
 String returnString = "[]";
-if ((rhoo.metrics.throughput!=null)&&(rhoo.metrics.throughput.size()>1))
-{
-ResourceLoad rs = new ResourceLoad(rhoo.metrics.throughput.get(0),rhoo.metrics.throughput.get(1));
+if ((rhoo.metrics.throughput != null) && (rhoo.metrics.throughput.size() > 1)) {
+ResourceLoad rs = new ResourceLoad(rhoo.metrics.throughput.get(0), rhoo.metrics.throughput.get(1));
 returnString = rs.toString();
 }
 return returnString;
@@ -146,36 +144,52 @@ public class RainyCloudGenerator implements Generator{
 @Override
 public void generate() throws Exception {
-try{
+try {
 remoteGenerationManager.submitJob(rhio);
-}catch(Exception e){
+} catch (Exception e) {
 e.printStackTrace();
 }
 AnalysisLogger.getLogger().trace("REMOTE PROCESSING STARTED");
 boolean finish = false;
-while (!finish && !interruptProcessing){
+while (!finish && !interruptProcessing) {
 float status = getStatus();
 // AnalysisLogger.getLogger().trace("Status "+status);
-if (status==100) finish = true;
+if (status == 100)
+finish = true;
 Thread.sleep(500);
 }
 AnalysisLogger.getLogger().trace("REMOTE PROCESSING ENDED");
 }
 @Override
 public ALG_PROPS[] getSupportedAlgorithms() {
-ALG_PROPS[] p = {ALG_PROPS.SPECIES_VS_CSQUARE_REMOTE_FROM_DATABASE};
+ALG_PROPS[] p = { ALG_PROPS.SPECIES_VS_CSQUARE_REMOTE_FROM_DATABASE };
 return p;
 }
 @Override
 public GENERATOR_WEIGHT getWeight() {
 return GENERATOR_WEIGHT.HIGH;
 }
+@Override
+public HashMap<String, String> getInputParameters() {
+HashMap<String, String> parameters = new HashMap<String, String>();
+parameters.put("RemoteEnvironment", "");
+parameters.put("ServiceUserName", "");
+parameters.put("CsquarecodesTable", "");
+parameters.put("DatabaseURL", "");
+parameters.put("DatabaseUserName", "");
+parameters.put("DatabasePassword", "");
+parameters.put("DistributionTable", "");
+parameters.put("EnvelopeTable", "");
+parameters.put("RemoteCalculator", "");
+return parameters;
+}
 }

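The new getInputParameters() override above is the first concrete use of the interface extension: the returned map enumerates, with empty defaults, exactly the keys that init() reads back via getParam. A caller can therefore drive configuration generically, as in this sketch (readValue stands for a hypothetical UI or information-system lookup, not an API of this project, and real values are needed before init() can succeed):

import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator;

public class RemoteSetupExample {
    // hypothetical hook that supplies the value of a named parameter
    static String readValue(String key) {
        return "";
    }

    public static void main(String[] args) {
        Generator remote = new RainyCloudGenerator();
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setConfigPath("./cfg/");
        config.setModel("AQUAMAPS_SUITABLE");
        config.setNumberOfResources(1);
        HashMap<String, String> required = remote.getInputParameters();
        for (String key : required.keySet())
            config.setParam(key, readValue(key)); // fill each advertised key
        remote.setConfiguration(config);
        remote.init();
    }
}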
View File

@@ -8,7 +8,7 @@ import java.util.ServiceLoader;
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
 import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
-import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
 import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
 import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
@@ -51,45 +51,65 @@ public class GeneratorsFactory {
 gens.get(i).init();
 }
 */
 }
+public static List<String> getProbabilityDistributionAlgorithms(String configPath) throws Exception{
+Properties p = AlgorithmConfiguration.getProperties(configPath + AlgorithmConfiguration.algorithmsFile);
+List<String> algs = new ArrayList<String>();
+for (Object algName: p.keySet()){
+algs.add((String)algName);
+}
+return algs;
+}
-public static void main1(String[] args) throws Exception {
-GenericConfiguration config = new GenericConfiguration();
-config.setDistributionTable("hspec_suitable_remote_test");
-config.setConfigPath("./cfg/");
-config.setCsquarecodesTable("hcaf_d");
-config.setEnvelopeTable("hspen_validation");
-config.setCreateTable(true);
-config.setNumberOfResources(20);
-config.setDatabaseUserName("gcube");
-config.setDatabasePassword("d4science2");
-config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
-config.setRemoteCalculator("http://node1.d.venusc.research-infrastructures.eu:5942/api/");
-config.setServiceUserName("gianpaolo.coro");
-config.setRemoteEnvironment("windows azure");
-HashMap<String, String> properties = new HashMap<String, String>();
-properties.put("property1", "value1");
-properties.put("property2", "value2");
-config.setModel("AQUAMAPS_SUITABLE");
-config.setGenerator("LOCAL_WITH_DATABASE");
-Generator gen = GeneratorsFactory.getGenerator(config);
-System.out.println(gen.getClass());
-}
+public static List<String> getAllGenerators(String configPath) throws Exception{
+Properties p = AlgorithmConfiguration.getProperties(configPath + AlgorithmConfiguration.generatorsFile);
+List<String> gens = new ArrayList<String>();
+for (Object genName: p.keySet()){
+gens.add((String)genName);
+}
+return gens;
+}
+public static List<String> getModels(String configPath) throws Exception{
+Properties p = AlgorithmConfiguration.getProperties(configPath + AlgorithmConfiguration.modelsFile);
+List<String> models = new ArrayList<String>();
+for (Object modelName: p.keySet()){
+models.add((String)modelName);
+}
+return models;
+}
+public static HashMap<String,String> getAlgorithmParameters(String configPath, String algorithmName) throws Exception{
+Properties p = AlgorithmConfiguration.getProperties(configPath + AlgorithmConfiguration.algorithmsFile);
+String algorithmclass = p.getProperty(algorithmName);
+Object algclass = Class.forName(algorithmclass).newInstance();
+//if the algorithm is a generator itself then take it
+if (algclass instanceof Generator){
+return ((Generator) algclass).getInputParameters();
+}
+else
+return ((SpatialProbabilityDistribution) algclass).getInputParameters();
+}
-public static Generator getGenerator(GenericConfiguration config) throws Exception {
+public static Generator getGenerator(AlgorithmConfiguration config) throws Exception {
 //modify this class in order to take the right generator algorithm
 try {
 //initialize the logger
-AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
+AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
 //take the algorithm
 String algorithm = config.getGenerator();
 if (algorithm == null) throw new Exception("GENERATOR NOT SPECIFIED");
 //take the algorithms list
-Properties p = GenericConfiguration.getProperties(config.getConfigPath() + GenericConfiguration.generatorsFile);
+Properties p = AlgorithmConfiguration.getProperties(config.getConfigPath() + AlgorithmConfiguration.generatorsFile);
 String algorithmclass = p.getProperty(algorithm);
 Object algclass = Class.forName(algorithmclass).newInstance();
 Generator g = (Generator) algclass;
@@ -103,17 +123,17 @@ public class GeneratorsFactory {
 }
-public static List<Generator> getGenerators(GenericConfiguration config) throws Exception {
+public static List<Generator> getGenerators(AlgorithmConfiguration config) throws Exception {
 //modify this class in order to manage generators weight and match algorithm vs generators
 List<Generator> generators = new ArrayList<Generator>();
 try {
 //initialize the logger
-AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
+AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
 //take the algorithm
 String algorithm = config.getModel();
 //take the algorithms list
-Properties p = GenericConfiguration.getProperties(config.getConfigPath() + GenericConfiguration.algorithmsFile);
+Properties p = AlgorithmConfiguration.getProperties(config.getConfigPath() + AlgorithmConfiguration.algorithmsFile);
 String algorithmclass = p.getProperty(algorithm);
 Object algclass = Class.forName(algorithmclass).newInstance();
 //if the algorithm is a generator itself then execute it
@@ -128,7 +148,7 @@ public class GeneratorsFactory {
 //take alg's properties
 ALG_PROPS[] algp = sp.getProperties();
 //take all generators
-Properties pg = GenericConfiguration.getProperties(config.getConfigPath() + GenericConfiguration.generatorsFile);
+Properties pg = AlgorithmConfiguration.getProperties(config.getConfigPath() + AlgorithmConfiguration.generatorsFile);
 //investigate on possible suitable generators
 for (Object generatorName:pg.values()){
 Generator gen = (Generator)Class.forName((String)generatorName).newInstance();

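The factory's hard-wired main1 test is gone; in its place the class exposes introspection that a service front-end can use to list what is deployed and what each entry needs. A usage sketch, assuming the same "./cfg/" directory (getAlgorithmParameters instantiates the class behind the name, so it must be on the classpath):

import java.util.HashMap;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;

public class DiscoveryExample {
    public static void main(String[] args) throws Exception {
        String cfg = "./cfg/";
        List<String> algorithms = GeneratorsFactory.getProbabilityDistributionAlgorithms(cfg);
        for (String name : algorithms) {
            // each algorithm reports the configuration keys it expects
            HashMap<String, String> params = GeneratorsFactory.getAlgorithmParameters(cfg, name);
            System.out.println(name + " -> " + params.keySet());
        }
        System.out.println("generators: " + GeneratorsFactory.getAllGenerators(cfg));
        System.out.println("models: " + GeneratorsFactory.getModels(cfg));
    }
}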
View File

@@ -3,16 +3,16 @@ package org.gcube.dataanalysis.ecoengine.processing.factories;
 import java.util.Properties;
 import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
-import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.ecoengine.interfaces.Model;
 import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
 public class ModelersFactory {
-public static Modeler getGenerator(GenericConfiguration config) throws Exception {
+public static Modeler getGenerator(AlgorithmConfiguration config) throws Exception {
 try {
-AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
-Properties p = GenericConfiguration.getProperties(config.getConfigPath() + GenericConfiguration.modelsFile);
+AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
+Properties p = AlgorithmConfiguration.getProperties(config.getConfigPath() + AlgorithmConfiguration.modelsFile);
 String objectclass = p.getProperty(config.getModel() + "_MODELER");
 Modeler g = (Modeler) Class.forName(objectclass).newInstance();
 String modelclass = p.getProperty(config.getModel());

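ModelersFactory follows the same registry convention with a naming twist: for a model name X, models.properties must hold both X (the Model) and X_MODELER (the Modeler that drives it); for the HSPEN entries shown earlier that resolves to ModelHSPEN and SimpleModeler. A resolution sketch under that assumption:

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;

public class ModelerExample {
    public static void main(String[] args) throws Exception {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setConfigPath("./cfg/");
        config.setModel("HSPEN"); // looks up HSPEN_MODELER=SimpleModeler wrapping HSPEN=ModelHSPEN
        Modeler modeler = ModelersFactory.getGenerator(config);
        System.out.println(modeler.getClass().getName());
    }
}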
View File

@@ -1,12 +1,12 @@
 package org.gcube.dataanalysis.ecoengine.spatialdistributions;
-import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.hibernate.SessionFactory;
 public class AquamapsNative2050 extends AquamapsNative {
-public void init(GenericConfiguration config,SessionFactory dbHibConnection) {
+public void init(AlgorithmConfiguration config,SessionFactory dbHibConnection) {
 super.init(config, dbHibConnection);
 type = "2050";
 }

View File

@@ -7,7 +7,7 @@ import java.util.Queue;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
@@ -29,15 +29,15 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
protected String type = null; protected String type = null;
@Override @Override
public void init(GenericConfiguration config,SessionFactory dbHibConnection) { public void init(AlgorithmConfiguration config,SessionFactory dbHibConnection) {
selectAllSpeciesQuery = String.format(selectAllSpeciesQuery, config.getEnvelopeTable()); selectAllSpeciesQuery = String.format(selectAllSpeciesQuery, config.getParam("EnvelopeTable"));
csquareCodeQuery = String.format(csquareCodeQuery, config.getCsquarecodesTable()); csquareCodeQuery = String.format(csquareCodeQuery, config.getParam("CsquarecodesTable"));
createTableStatement = String.format(createTableStatement,config.getDistributionTable()); createTableStatement = String.format(createTableStatement,config.getParam("DistributionTable"));
destinationTable = config.getDistributionTable(); destinationTable = config.getParam("DistributionTable");
core = new AquamapsAlgorithmCore(); core = new AquamapsAlgorithmCore();
if ((config.getPreprocessedTables()!=null)&&(config.getPreprocessedTables().size()>0)) if ((config.getParam("PreprocessedTable")!=null)&&(config.getParam("PreprocessedTable").length()>0))
hspenMinMaxLat = config.getPreprocessedTables().get(0); hspenMinMaxLat = config.getParam("PreprocessedTable");
AnalysisLogger.getLogger().trace("Aquamaps Algorithm Init(->getting min max latitudes from "+hspenMinMaxLat); AnalysisLogger.getLogger().trace("Aquamaps Algorithm Init(->getting min max latitudes from "+hspenMinMaxLat);
@@ -186,7 +186,8 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
@Override @Override
public ALG_PROPS[] getProperties() { public ALG_PROPS[] getProperties() {
ALG_PROPS [] p = {ALG_PROPS.SPECIES_VS_CSQUARE_FROM_DATABASE, ALG_PROPS.SPECIES_VS_CSQUARE_REMOTE_FROM_DATABASE}; // ALG_PROPS [] p = {ALG_PROPS.SPECIES_VS_CSQUARE_FROM_DATABASE, ALG_PROPS.SPECIES_VS_CSQUARE_REMOTE_FROM_DATABASE};
ALG_PROPS [] p = {ALG_PROPS.SPECIES_VS_CSQUARE_FROM_DATABASE};
return p; return p;
} }
@@ -200,6 +201,22 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
return "Algorithm by Aquamaps"; return "Algorithm by Aquamaps";
} }
@Override
public HashMap<String, String> getInputParameters() {
HashMap<String, String> parameters = new HashMap<String,String>();
parameters.put("EnvelopeTable", "hspen");
parameters.put("CsquarecodesTable", "hcaf_d");
parameters.put("DistributionTable", "hspec_default");
parameters.put("PreprocessedTable", "maxminlat_hspen");
parameters.put("CreateTable", "hspen_minmaxlat");
parameters.put("DatabaseUserName","");
parameters.put("DatabasePassword","");
parameters.put("DatabaseURL","");
parameters.put("DatabaseDriver","");
return parameters;
}
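The block above is the heart of the refactoring: typed getters (getEnvelopeTable(), getDistributionTable(), ...) give way to a string-keyed map that each algorithm publishes as its defaults. A small sketch of the pattern with a hypothetical stand-in class, assuming setParam/getParam behave as plain map writes and reads:

import java.util.HashMap;

class ParamConfigSketch {
    private final HashMap<String, String> params = new HashMap<String, String>();
    void setParam(String key, String value) { params.put(key, value); }
    String getParam(String key) { return params.get(key); }

    public static void main(String[] args) {
        ParamConfigSketch config = new ParamConfigSketch();
        // seed with the algorithm's published defaults ...
        config.setParam("EnvelopeTable", "hspen");
        config.setParam("DistributionTable", "hspec_default");
        // ... then override only what this run needs
        config.setParam("EnvelopeTable", "hspen_mini");
        System.out.println(config.getParam("EnvelopeTable")); // prints: hspen_mini
    }
}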

View File

@@ -1,12 +1,12 @@
package org.gcube.dataanalysis.ecoengine.spatialdistributions; package org.gcube.dataanalysis.ecoengine.spatialdistributions;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
public class AquamapsSuitable2050 extends AquamapsSuitable { public class AquamapsSuitable2050 extends AquamapsSuitable {
public void init(GenericConfiguration config,SessionFactory dbHibConnection) { public void init(AlgorithmConfiguration config,SessionFactory dbHibConnection) {
super.init(config, dbHibConnection); super.init(config, dbHibConnection);
type = "2050"; type = "2050";
} }

View File

@@ -4,13 +4,14 @@ import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.ObjectOutputStream; import java.io.ObjectOutputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID; import java.util.UUID;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{ public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
@@ -58,7 +59,7 @@ public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
} }
@Override @Override
public void init(GenericConfiguration config) { public void init(AlgorithmConfiguration config) {
AnalysisLogger.getLogger().trace("Dummy INIT"); AnalysisLogger.getLogger().trace("Dummy INIT");
randomElements = new ArrayList<String>(); randomElements = new ArrayList<String>();
for (int i=0;i<170000;i++) for (int i=0;i<170000;i++)
@@ -187,4 +188,9 @@ public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
return null; return null;
} }
@Override
public HashMap<String, String> getInputParameters() {
return null;
}
} }

View File

@@ -4,13 +4,14 @@ import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.ObjectOutputStream; import java.io.ObjectOutputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID; import java.util.UUID;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric; import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{ public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{
@@ -23,7 +24,7 @@ public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{
String pers; String pers;
@Override @Override
public void init(GenericConfiguration config) { public void init(AlgorithmConfiguration config) {
pers = config.getPersistencePath(); pers = config.getPersistencePath();
} }
@@ -134,5 +135,11 @@ public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{
return null; return null;
} }
@Override
public HashMap<String, String> getInputParameters() {
// TODO Auto-generated method stub
return null;
}
} }

View File

@@ -1,164 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test;
import java.util.HashMap;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class RegressionTestsGeneration {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
// System.out.println("TEST 1");
// Generator generator = GeneratorsFactory.getGenerator(testConfigRemote());
// generate(generator);
System.out.println("TEST 2");
Generator generator = GeneratorsFactory.getGenerator(testConfigSuitable());
generate(generator);
/*
System.out.println("TEST 3");
generator = GeneratorsFactory.getGenerator(testConfigNative());
generate(generator);
System.out.println("TEST 4");
generator = GeneratorsFactory.getGenerator(testConfigSuitable2050());
generate(generator);
System.out.println("TEST 5");
generator = GeneratorsFactory.getGenerator(testConfigNative2050());
generate(generator);
*/
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
RegressionTestsGeneration tgs = new RegressionTestsGeneration();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.generate();
} catch (Exception e) {
}
}
}
private static GenericConfiguration testConfigRemote() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_remote_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
config.setCreateTable(true);
config.setNumberOfResources(20);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://146.48.87.169/testdb");
config.setModel("REMOTE_AQUAMAPS_SUITABLE");
config.setRemoteCalculator("http://node1.d.venusc.research-infrastructures.eu:5942/api/");
config.setServiceUserName("gianpaolo.coro");
config.setRemoteEnvironment("windows azure");
HashMap<String, String> properties = new HashMap<String, String>();
properties.put("property1", "value1");
properties.put("property2", "value2");
config.setGeneralProperties(properties);
return config;
}
private static GenericConfiguration testConfigSuitable() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_SUITABLE");
config.setGenerator("LOCAL_WITH_DATABASE");
return config;
}
private static GenericConfiguration testConfigNative() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_native_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_NATIVE");
return config;
}
private static GenericConfiguration testConfigNative2050() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_native_2050_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_NATIVE_2050");
return config;
}
private static GenericConfiguration testConfigSuitable2050() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_2050_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_SUITABLE_2050");
return config;
}
}

View File

@@ -1,174 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test;
import java.util.HashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class RegressionTestsGenerationList {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
// System.out.println("TEST 1");
// Generator generator = GeneratorsFactory.getGenerator(testConfigRemote());
// generate(generator);
System.out.println("TEST 2");
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigSuitable());
Generator generator = generators.get(0);
generator.init();
generate(generator);
/*
System.out.println("TEST 3");
generator = GeneratorsFactory.getGenerator(testConfigNative());
generate(generator);
System.out.println("TEST 4");
generator = GeneratorsFactory.getGenerator(testConfigSuitable2050());
generate(generator);
System.out.println("TEST 5");
generator = GeneratorsFactory.getGenerator(testConfigNative2050());
generate(generator);
*/
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
RegressionTestsGenerationList tgs = new RegressionTestsGenerationList();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.generate();
} catch (Exception e) {
}
}
}
private static GenericConfiguration testConfigRemote() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_remote_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
config.setCreateTable(true);
config.setNumberOfResources(20);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://146.48.87.169/testdb");
config.setModel("REMOTE_AQUAMAPS_SUITABLE");
config.setRemoteCalculator("http://node1.d.venusc.research-infrastructures.eu:5942/api/");
config.setServiceUserName("gianpaolo.coro");
config.setRemoteEnvironment("windows azure");
HashMap<String, String> properties = new HashMap<String, String>();
properties.put("property1", "value1");
properties.put("property2", "value2");
config.setGeneralProperties(properties);
return config;
}
private static GenericConfiguration testConfigSuitable() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_micro");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://146.48.87.169/testdb");
config.setRemoteCalculator("http://node1.d.venusc.research-infrastructures.eu:5942/api/");
config.setServiceUserName("gianpaolo.coro");
config.setRemoteEnvironment("windows azure");
HashMap<String, String> properties = new HashMap<String, String>();
properties.put("property1", "value1");
properties.put("property2", "value2");
config.setModel("AQUAMAPS_SUITABLE");
return config;
}
private static GenericConfiguration testConfigNative() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_native_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_NATIVE");
return config;
}
private static GenericConfiguration testConfigNative2050() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_native_2050_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_NATIVE_2050");
return config;
}
private static GenericConfiguration testConfigSuitable2050() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_2050_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_SUITABLE_2050");
return config;
}
}

View File

@@ -1,93 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
public class RegressionTestsModeling {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
GenericConfiguration modelconfig = new GenericConfiguration();
modelconfig.setConfigPath("./cfg/");
modelconfig.setModel("HSPEN");
Modeler modeler = ModelersFactory.getGenerator(modelconfig);
Object input = testInputConfig();
Object output = testOutputConfig();
// modeler.model(input, null, output);
generate(modeler, input, output);
}
private static void generate(Modeler modeler, Object input , Object output) throws Exception {
if (modeler != null) {
RegressionTestsModeling tgs = new RegressionTestsModeling();
ThreadCalculator tc = tgs.new ThreadCalculator(modeler,input,output);
Thread t = new Thread(tc);
t.start();
while (modeler.getStatus() < 100) {
String resLoad = modeler.getResourceLoad();
String ress = modeler.getResources();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Modeling Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Modeler mg;
Object input;
Object output;
public ThreadCalculator(Modeler modeler, Object input ,Object output) {
this.mg = modeler;
this.input=input;
this.output=output;
}
public void run() {
try {
mg.model(input,null, output);
} catch (Exception e) {
}
}
}
private static GenericConfiguration testInputConfig() {
GenericConfiguration config = new GenericConfiguration();
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
config.setOccurrenceCellsTable("occurrencecells");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
return config;
}
private static GenericConfiguration testOutputConfig() {
GenericConfiguration config = new GenericConfiguration();
config.setEnvelopeTable("hspen_validation_refactored");
return config;
}
}

View File

@@ -0,0 +1,92 @@
package org.gcube.dataanalysis.ecoengine.test;
import java.util.HashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class TestsMetaInfo {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("***TEST 1 - Get Algorithm Information***");
HashMap<String, String> map = GeneratorsFactory.getAlgorithmParameters("./cfg/","DUMMY");
System.out.println("input for DUMMY algorithm: "+map);
map = GeneratorsFactory.getAlgorithmParameters("./cfg/","AQUAMAPS_SUITABLE");
System.out.println("input for AQUAMAPS_SUITABLE algorithm: "+map);
System.out.println("\n***TEST 2 - Get Single Generator***");
Generator g = GeneratorsFactory.getGenerator(testConfig());
System.out.println("Found generator "+g);
System.out.println("\n***TEST 3 - Get All Algorithms ***");
System.out.println("Algs: "+GeneratorsFactory.getProbabilityDistributionAlgorithms("./cfg/"));
System.out.println("\n***TEST 4 - Get All Generators ***");
System.out.println("Gens: "+GeneratorsFactory.getAllGenerators("./cfg/"));
System.out.println("\n***TEST 5 - Get All Models to be trained ***");
System.out.println("Models: "+GeneratorsFactory.getModels("./cfg/"));
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
TestsMetaInfo tgs = new TestsMetaInfo();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.generate();
} catch (Exception e) {
}
}
}
private static AlgorithmConfiguration testConfig() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(2);
config.setModel("TEST");
config.setGenerator("SIMPLE_LOCAL");
return config;
}
}
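TestsMetaInfo exercises the new discovery calls. Their likely mechanics, reconstructed as a sketch (the file name, interface, and helper below are illustrative, not the factory's actual code): resolve the algorithm's class name from the properties file, instantiate it reflectively, and ask the instance for its declared inputs.

import java.io.FileInputStream;
import java.util.HashMap;
import java.util.Properties;

class ParamDiscoverySketch {
    interface DescribedAlgorithm { HashMap<String, String> getInputParameters(); }

    static HashMap<String, String> discover(String cfgPath, String algorithmName) throws Exception {
        Properties p = new Properties();
        FileInputStream fis = new FileInputStream(cfgPath + "algorithms.properties");
        try { p.load(fis); } finally { fis.close(); }
        String className = p.getProperty(algorithmName);
        // instantiate reflectively, as the factories in this commit do
        DescribedAlgorithm alg = (DescribedAlgorithm) Class.forName(className).newInstance();
        return alg.getInputParameters(); // may be null, e.g. DummyAlgorithm returns null
    }
}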

View File

@@ -3,7 +3,7 @@ package org.gcube.dataanalysis.ecoengine.test;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator; import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory; import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
@@ -16,7 +16,7 @@ public class TestsTESTGeneration {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
System.out.println("TEST 1"); System.out.println("TEST 1");
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigSuitable()); List<Generator> generators = GeneratorsFactory.getGenerators(testConfig());
generators.get(0).init(); generators.get(0).init();
generate(generators.get(0)); generate(generators.get(0));
generators = null; generators = null;
@@ -65,8 +65,8 @@ public static void main(String[] args) throws Exception {
} }
private static GenericConfiguration testConfigSuitable() { private static AlgorithmConfiguration testConfig() {
GenericConfiguration config = new GenericConfiguration(); AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/"); config.setConfigPath("./cfg/");
config.setPersistencePath("./"); config.setPersistencePath("./");
config.setNumberOfResources(2); config.setNumberOfResources(2);

View File

@@ -1,9 +1,7 @@
package org.gcube.dataanalysis.ecoengine.test.generations; package org.gcube.dataanalysis.ecoengine.test.generations;
import java.util.HashMap;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator; import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory; import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
@@ -64,18 +62,20 @@ public class GenerationHSPECValidation {
} }
private static GenericConfiguration testConfigSuitable() { private static AlgorithmConfiguration testConfigSuitable() {
GenericConfiguration config = new GenericConfiguration(); AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setDistributionTable("hspec_validation_automatic");
config.setConfigPath("./cfg/"); config.setParam("DistributionTable","hspec_validation_automatic");
config.setCsquarecodesTable("hcaf_d"); config.setParam("ConfigPath","./cfg/");
config.setEnvelopeTable("hspen_validation"); config.setParam("CsquarecodesTable","hcaf_d");
config.setCreateTable(true); config.setParam("EnvelopeTable","hspen_validation");
config.setParam("CreateTable","true");
config.setNumberOfResources(2); config.setNumberOfResources(2);
config.setDatabaseUserName("gcube"); config.setParam("DatabaseUserName","gcube");
config.setDatabasePassword("d4science2"); config.setParam("DatabasePassword","d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb"); config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_SUITABLE"); config.setModel("AQUAMAPS_SUITABLE");
return config; return config;
} }

View File

@@ -0,0 +1,129 @@
package org.gcube.dataanalysis.ecoengine.test.regressions;
import java.util.HashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class RegressionComplexGeneration {
/**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigRemote());
generators.get(0).init();
generate(generators.get(0));
generators = null;
System.out.println("TEST 2");
generators = GeneratorsFactory.getGenerators(testConfigLocal());
generators.get(0).init();
generate(generators.get(0));
generators = null;
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
RegressionComplexGeneration tgs = new RegressionComplexGeneration();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.generate();
} catch (Exception e) {
}
}
}
private static AlgorithmConfiguration testConfigRemote() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(5);
config.setModel("REMOTE_AQUAMAPS_SUITABLE");
config.setParam("DistributionTable","hspec_suitable_remote_test");
config.setParam("CsquarecodesTable","hcaf_d");
config.setParam("EnvelopeTable","hspen_micro");
config.setParam("CreateTable","true");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://146.48.87.169/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("RemoteCalculator","http://node1.d.venusc.research-infrastructures.eu:5942/api/");
config.setParam("ServiceUserName","gianpaolo.coro");
config.setParam("RemoteEnvironment","windows azure");
HashMap<String, String> properties = new HashMap<String, String>();
properties.put("property1", "value1");
properties.put("property2", "value2");
config.addGeneralProperties(properties);
return config;
}
private static AlgorithmConfiguration testConfigLocal() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(5);
config.setModel("AQUAMAPS_SUITABLE");
config.setParam("DistributionTable","hspec_suitable_test");
config.setParam("CsquarecodesTable","hcaf_d");
config.setParam("EnvelopeTable","hspen_micro");
config.setParam("PreprocessedTable", "maxminlat_hspen");
config.setParam("CreateTable","true");
config.setParam("DatabaseUserName","gcube");
config.setParam("DatabasePassword","d4science2");
config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
HashMap<String, String> properties = new HashMap<String, String>();
properties.put("property1", "value1");
properties.put("property2", "value2");
config.addGeneralProperties(properties);
return config;
}
}
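RegressionComplexGeneration is the clearest statement of the migration rule applied throughout this commit: engine-level settings (config path, resources, model) keep their typed setters, while everything algorithm-specific moves into setParam. A condensed recap as a sketch, with calls taken from the diffs above (table names and credentials are the tests' own placeholder values):

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;

class MigrationRecap {
    static AlgorithmConfiguration newStyleConfig() {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        config.setConfigPath("./cfg/");        // engine-level: typed setter kept
        config.setPersistencePath("./");       // engine-level: typed setter kept
        config.setNumberOfResources(5);        // engine-level: typed setter kept
        config.setModel("AQUAMAPS_SUITABLE");  // engine-level: typed setter kept
        config.setParam("EnvelopeTable", "hspen_micro");             // was setEnvelopeTable(...)
        config.setParam("CsquarecodesTable", "hcaf_d");              // was setCsquarecodesTable(...)
        config.setParam("DistributionTable", "hspec_suitable_test"); // was setDistributionTable(...)
        config.setParam("CreateTable", "true");                      // was setCreateTable(true)
        config.setParam("DatabaseUserName", "gcube");                // was setDatabaseUserName(...)
        config.setParam("DatabasePassword", "d4science2");           // was setDatabasePassword(...)
        config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb"); // was setDatabaseURL(...)
        config.setParam("DatabaseDriver", "org.postgresql.Driver");  // new explicit parameter
        return config;
    }
}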

View File

@@ -1,11 +1,13 @@
package org.gcube.dataanalysis.ecoengine.test; package org.gcube.dataanalysis.ecoengine.test.regressions;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator; import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory; import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class TestsDummyGeneration { public class RegressionSimpleGeneration {
/** /**
* example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species * example of parallel processing on a single machine; the procedure will generate a new table for a distribution on suitable species
* *
@@ -14,16 +16,24 @@ public class TestsDummyGeneration {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
System.out.println("TEST 1"); System.out.println("TEST 1");
Generator generator = GeneratorsFactory.getGenerator(testConfigSuitable()); List<Generator> generators = GeneratorsFactory.getGenerators(testConfig1());
generate(generator); generators.get(0).init();
generate(generators.get(0));
generators = null;
System.out.println("TEST 2");
generators = GeneratorsFactory.getGenerators(testConfig2());
generators.get(0).init();
generate(generators.get(0));
generators = null;
} }
private static void generate(Generator generator) throws Exception { private static void generate(Generator generator) throws Exception {
if (generator != null) { if (generator != null) {
TestsDummyGeneration tgs = new TestsDummyGeneration(); RegressionSimpleGeneration tgs = new RegressionSimpleGeneration();
ThreadCalculator tc = tgs.new ThreadCalculator(generator); ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc); Thread t = new Thread(tc);
t.start(); t.start();
@@ -62,13 +72,22 @@ public static void main(String[] args) throws Exception {
} }
private static GenericConfiguration testConfigSuitable() { private static AlgorithmConfiguration testConfig1() {
GenericConfiguration config = new GenericConfiguration(); AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/"); config.setConfigPath("./cfg/");
config.setPersistencePath("./"); config.setPersistencePath("./");
config.setNumberOfResources(2); config.setNumberOfResources(5);
config.setModel("DUMMY"); config.setModel("TEST");
return config; return config;
} }
private static AlgorithmConfiguration testConfig2() {
AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(5);
config.setModel("DUMMY");
return config;
}
} }

View File

@@ -1,69 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.regressions1;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator;
public class RegressionTestLocalGenerationStatus {
/**
* example of parallel processing on a single machine
* the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception{
RegressionTestLocalGenerationStatus tgs = new RegressionTestLocalGenerationStatus();
GenericConfiguration config = new GenericConfiguration();
//path to the cfg directory containing default parameters
config.setDistributionTable("hspec_suitable_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
// config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_SUITABLE");
LocalSplitGenerator generator = new LocalSplitGenerator(config);
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus()<100){
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: "+resLoad);
System.out.println("RESOURCES: "+ress);
System.out.println("SPECIES: "+species);
System.out.println("STATUS: "+generator.getStatus());
Thread.sleep(30000);
}
}
public class ThreadCalculator implements Runnable {
LocalSplitGenerator dg ;
public ThreadCalculator(LocalSplitGenerator dg) {
this.dg = dg;
}
public void run() {
try{
dg.generate();
}catch(Exception e){}
}
}
}

View File

@@ -1,69 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.regressions1;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator;
public class RegressionTestNative2050LocalGenerationStatus {
/**
* example of parallel processing on a single machine
* the procedure will generate a new table for a distribution on native species
*
*/
public static void main(String[] args) throws Exception{
RegressionTestNative2050LocalGenerationStatus tgs = new RegressionTestNative2050LocalGenerationStatus();
GenericConfiguration config = new GenericConfiguration();
//path to the cfg directory containing default parameters
config.setDistributionTable("hspec_native_2050_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
// config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_NATIVE_2050");
LocalSplitGenerator generator = new LocalSplitGenerator(config);
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus()<100){
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: "+resLoad);
System.out.println("RESOURCES: "+ress);
System.out.println("SPECIES: "+species);
System.out.println("STATUS: "+generator.getStatus());
Thread.sleep(30000);
}
}
public class ThreadCalculator implements Runnable {
LocalSplitGenerator dg ;
public ThreadCalculator(LocalSplitGenerator dg) {
this.dg = dg;
}
public void run() {
try{
dg.generate();
}catch(Exception e){}
}
}
}

View File

@@ -1,69 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.regressions1;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator;
public class RegressionTestNativeLocalGenerationStatus {
/**
* example of parallel processing on a single machine
* the procedure will generate a new table for a distribution on native species
*
*/
public static void main(String[] args) throws Exception{
RegressionTestNativeLocalGenerationStatus tgs = new RegressionTestNativeLocalGenerationStatus();
GenericConfiguration config = new GenericConfiguration();
//path to the cfg directory containing default parameters
config.setDistributionTable("hspec_native_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
// config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_NATIVE");
LocalSplitGenerator generator = new LocalSplitGenerator(config);
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus()<100){
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: "+resLoad);
System.out.println("RESOURCES: "+ress);
System.out.println("SPECIES: "+species);
System.out.println("STATUS: "+generator.getStatus());
Thread.sleep(30000);
}
}
public class ThreadCalculator implements Runnable {
LocalSplitGenerator dg ;
public ThreadCalculator(LocalSplitGenerator dg) {
this.dg = dg;
}
public void run() {
try{
dg.generate();
}catch(Exception e){}
}
}
}

View File

@@ -1,83 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.regressions1;
import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator;
public class RegressionTestRemoteGenerationStatus {
/**
* example of parallel processing on a single machine
* the procedure will generate a new table for a distribution on suitable species
*
*/
public static void main(String[] args) throws Exception{
RegressionTestRemoteGenerationStatus tgs = new RegressionTestRemoteGenerationStatus();
GenericConfiguration config = new GenericConfiguration();
//path to the cfg directory containing default parameters
config.setDistributionTable("hspec_suitable_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
// config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(20);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://146.48.87.169/testdb");
config.setModel("REMOTE_AQUAMAPS_SUITABLE");
config.setRemoteCalculator("http://node1.d.venusc.research-infrastructures.eu:5942/api/");
config.setServiceUserName("gianpaolo.coro");
//new parameters
config.setRemoteEnvironment("windows azure");
HashMap<String,String> properties = new HashMap<String, String>();
properties.put("property1", "value1");
properties.put("property2", "value2");
config.setGeneralProperties(properties);
RainyCloudGenerator generator = new RainyCloudGenerator(config);
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus()<100){
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: "+resLoad);
System.out.println("RESOURCES: "+ress);
System.out.println("SPECIES: "+species);
System.out.println("STATUS: "+generator.getStatus());
Thread.sleep(1000);
}
}
public class ThreadCalculator implements Runnable {
RainyCloudGenerator dg ;
public ThreadCalculator(RainyCloudGenerator dg) {
this.dg = dg;
}
public void run() {
try{
dg.generate();
}catch(Exception e){}
}
}
}

View File

@@ -1,69 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.regressions1;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator;
public class RegressionTestSuitable2050LocalGenerationStatus {
/**
* example of parallel processing on a single machine
* the procedure will generate a new table for a distribution on native species
*
*/
public static void main(String[] args) throws Exception{
RegressionTestSuitable2050LocalGenerationStatus tgs = new RegressionTestSuitable2050LocalGenerationStatus();
GenericConfiguration config = new GenericConfiguration();
//path to the cfg directory containing default parameters
config.setDistributionTable("hspec_suitable_2050_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
// config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_SUITABLE_2050");
LocalSplitGenerator generator = new LocalSplitGenerator(config);
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus()<100){
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: "+resLoad);
System.out.println("RESOURCES: "+ress);
System.out.println("SPECIES: "+species);
System.out.println("STATUS: "+generator.getStatus());
Thread.sleep(30000);
}
}
public class ThreadCalculator implements Runnable {
LocalSplitGenerator dg ;
public ThreadCalculator(LocalSplitGenerator dg) {
this.dg = dg;
}
public void run() {
try{
dg.generate();
}catch(Exception e){}
}
}
}

View File

@@ -1,7 +1,7 @@
package org.gcube.dataanalysis.ecoengine.test.tables; package org.gcube.dataanalysis.ecoengine.test.tables;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.HpecDiscrepanciesCalculator; import org.gcube.dataanalysis.ecoengine.utils.HspecDiscrepanciesCalculator;
public class CompareHspecSingle { public class CompareHspecSingle {
@@ -9,19 +9,19 @@ public class CompareHspecSingle {
String configPath = "./cfg/"; String configPath = "./cfg/";
GenericConfiguration config = new GenericConfiguration(); AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setDistributionTable("hspec_suitable_automatic_local"); config.setParam("DistributionTable","hspec_suitable_automatic_local");
config.setConfigPath("./cfg/"); config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d"); config.setParam("CsquarecodesTable","hcaf_d");
config.setEnvelopeTable("hspen_micro"); config.setParam("EnvelopeTable","hspen_micro");
// config.setEnvelopeTable("hspen"); // config.setEnvelopeTable("hspen");
config.setCreateTable(true); config.setParam("CreateTable","true");
config.setNumberOfResources(2); config.setNumberOfResources(2);
config.setDatabaseUserName("gcube"); config.setParam("DatabaseUserName","gcube");
config.setDatabasePassword("d4science2"); config.setParam("DatabasePassword","d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb"); config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
HpecDiscrepanciesCalculator ec = new HpecDiscrepanciesCalculator(config); HspecDiscrepanciesCalculator ec = new HspecDiscrepanciesCalculator(config);
ec.referenceTable = "hspec_validation where probability>0.19"; ec.referenceTable = "hspec_validation where probability>0.19";
ec.analyzedTable = "hspec_suitable_automatic_local"; ec.analyzedTable = "hspec_suitable_automatic_local";

View File

@@ -1,6 +1,6 @@
package org.gcube.dataanalysis.ecoengine.test.tables; package org.gcube.dataanalysis.ecoengine.test.tables;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.TablesDiscrepanciesCalculator; import org.gcube.dataanalysis.ecoengine.utils.TablesDiscrepanciesCalculator;
public class CompareTables { public class CompareTables {
@@ -9,18 +9,17 @@ public class CompareTables {
String configPath = "./cfg/"; String configPath = "./cfg/";
GenericConfiguration config = new GenericConfiguration(); AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setDistributionTable("hspec_suitable_automatic_local"); config.setParam("DistributionTable","hspec_suitable_automatic_local");
config.setConfigPath("./cfg/"); config.setParam("ConfigPath","./cfg/");
config.setCsquarecodesTable("hcaf_d"); config.setParam("CsquarecodesTable","hcaf_d");
config.setEnvelopeTable("hspen_micro"); config.setParam("EnvelopeTable","hspen_micro");
// config.setEnvelopeTable("hspen"); // config.setEnvelopeTable("hspen");
config.setCreateTable(true); config.setParam("CreateTable","true");
config.setNumberOfResources(2); config.setNumberOfResources(2);
config.setDatabaseUserName("gcube"); config.setParam("DatabaseUserName","gcube");
config.setDatabasePassword("d4science2"); config.setParam("DatabasePassword","d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb"); config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
TablesDiscrepanciesCalculator ec = new TablesDiscrepanciesCalculator(config); TablesDiscrepanciesCalculator ec = new TablesDiscrepanciesCalculator(config);
ec.referenceTable = "hspec_suitable_automatic_local"; ec.referenceTable = "hspec_suitable_automatic_local";

View File

@@ -4,7 +4,7 @@ import java.math.BigInteger;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
@@ -12,7 +12,7 @@ import org.hibernate.SessionFactory;
* checks if two tables are equal * checks if two tables are equal
* checks numbers at the second decimal position * checks numbers at the second decimal position
*/ */
public class HpecDiscrepanciesCalculator { public class HspecDiscrepanciesCalculator {
private BigInteger numOfElements; private BigInteger numOfElements;
private int errorCounter; private int errorCounter;
@@ -41,11 +41,11 @@ public class HpecDiscrepanciesCalculator {
//init connections //init connections
public HpecDiscrepanciesCalculator(GenericConfiguration config) throws Exception { public HspecDiscrepanciesCalculator(AlgorithmConfiguration config) throws Exception {
AnalysisLogger.setLogger(config.getConfigPath() + LogFile); AnalysisLogger.setLogger(config.getConfigPath() + LogFile);
referencedbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + GenericConfiguration.defaultConnectionFile,config); referencedbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile,config);
AnalysisLogger.getLogger().debug("ReferenceDB initialized"); AnalysisLogger.getLogger().debug("ReferenceDB initialized");
destinationdbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + GenericConfiguration.defaultConnectionFile,config); destinationdbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile,config);
AnalysisLogger.getLogger().debug("OriginalDB initialized"); AnalysisLogger.getLogger().debug("OriginalDB initialized");
} }
@@ -90,20 +90,20 @@ public class HpecDiscrepanciesCalculator {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
String configPath = "./cfg/"; String configPath = "./cfg/";
GenericConfiguration config = new GenericConfiguration(); AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setDistributionTable("hspec_suitable_automatic_local"); config.setParam("DistributionTable","hspec_suitable_automatic_local");
config.setConfigPath("./cfg/"); config.setParam("ConfigPath","./cfg/");
config.setCsquarecodesTable("hcaf_d"); config.setParam("CsquarecodesTable","hcaf_d");
config.setEnvelopeTable("hspen_micro"); config.setParam("EnvelopeTable","hspen_micro");
// config.setEnvelopeTable("hspen"); // config.setEnvelopeTable("hspen");
config.setCreateTable(true); config.setParam("CreateTable","true");
config.setNumberOfResources(2); config.setNumberOfResources(2);
config.setDatabaseUserName("gcube"); config.setParam("DatabaseUserName","gcube");
config.setDatabasePassword("d4science2"); config.setParam("DatabasePassword","d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb"); config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
HpecDiscrepanciesCalculator ec = new HpecDiscrepanciesCalculator(config); HspecDiscrepanciesCalculator ec = new HspecDiscrepanciesCalculator(config);
long t0 = System.currentTimeMillis(); long t0 = System.currentTimeMillis();
ec.runTest(); ec.runTest();
long t1 = System.currentTimeMillis(); long t1 = System.currentTimeMillis();
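The class comment above promises equality "at the second decimal position". A sketch of that check in isolation, assuming the calculator compares values rounded to two decimals (the actual implementation may differ):

class DecimalCompareSketch {
    // true when a and b agree once rounded to two decimal places
    static boolean equalToSecondDecimal(double a, double b) {
        return Math.round(a * 100.0) == Math.round(b * 100.0);
    }

    public static void main(String[] args) {
        System.out.println(equalToSecondDecimal(0.191, 0.194)); // true: both round to 0.19
        System.out.println(equalToSecondDecimal(0.19, 0.20));   // false
    }
}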

View File

@@ -4,7 +4,7 @@ import java.math.BigInteger;
import java.util.List; import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
@@ -40,11 +40,11 @@ public class TablesDiscrepanciesCalculator {
//init connections //init connections
public TablesDiscrepanciesCalculator(GenericConfiguration config) throws Exception { public TablesDiscrepanciesCalculator(AlgorithmConfiguration config) throws Exception {
AnalysisLogger.setLogger(config.getConfigPath() + LogFile); AnalysisLogger.setLogger(config.getConfigPath() + LogFile);
referencedbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + GenericConfiguration.defaultConnectionFile,config); referencedbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile,config);
AnalysisLogger.getLogger().debug("ReferenceDB initialized"); AnalysisLogger.getLogger().debug("ReferenceDB initialized");
destinationdbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + GenericConfiguration.defaultConnectionFile,config); destinationdbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile,config);
AnalysisLogger.getLogger().debug("OriginalDB initialized"); AnalysisLogger.getLogger().debug("OriginalDB initialized");
} }
@@ -81,17 +81,17 @@ public class TablesDiscrepanciesCalculator {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
String configPath = "./cfg/"; String configPath = "./cfg/";
GenericConfiguration config = new GenericConfiguration(); AlgorithmConfiguration config = new AlgorithmConfiguration();
config.setDistributionTable("hspec_suitable_automatic_local"); config.setParam("DistributionTable","hspec_suitable_automatic_local");
config.setConfigPath("./cfg/"); config.setParam("ConfigPath","./cfg/");
config.setCsquarecodesTable("hcaf_d"); config.setParam("CsquarecodesTable","hcaf_d");
config.setEnvelopeTable("hspen_micro"); config.setParam("EnvelopeTable","hspen_micro");
// config.setEnvelopeTable("hspen"); // config.setEnvelopeTable("hspen");
config.setCreateTable(true); config.setParam("CreateTable","true");
config.setNumberOfResources(2); config.setNumberOfResources(2);
config.setDatabaseUserName("gcube"); config.setParam("DatabaseUserName","gcube");
config.setDatabasePassword("d4science2"); config.setParam("DatabasePassword","d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb"); config.setParam("DatabaseURL","jdbc:postgresql://localhost/testdb");
TablesDiscrepanciesCalculator ec = new TablesDiscrepanciesCalculator(config); TablesDiscrepanciesCalculator ec = new TablesDiscrepanciesCalculator(config);
long t0 = System.currentTimeMillis(); long t0 = System.currentTimeMillis();