This commit is contained in:
Gianpaolo Coro 2012-10-19 16:36:47 +00:00
parent 54ba7bb3e5
commit 8cbf46bdd1
31 changed files with 932 additions and 64 deletions

BIN
cfg/1neuralnetwork Normal file

Binary file not shown.

View File

@ -4,3 +4,4 @@ AQUAMAPS_NATIVE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.Aquam
AQUAMAPS_SUITABLE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable2050
AQUAMAPS_NATIVE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNN
AQUAMAPS_SUITABLE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNNSuitable
FEED_FORWARD_A_N_N_DISTRIBUTION=org.gcube.dataanalysis.ecoengine.spatialdistributions.FeedForwardNeuralNetworkDistribution

View File

@ -1,2 +1,3 @@
HSPEN=org.gcube.dataanalysis.ecoengine.models.ModelHSPEN
AQUAMAPSNN=org.gcube.dataanalysis.ecoengine.models.ModelAquamapsNN
FEED_FORWARD_ANN=org.gcube.dataanalysis.ecoengine.models.FeedForwardNN
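These property files register the new algorithms under the names the engine factories expose. At run time a factory resolves a registered name to its implementation class by reflection, the same pattern LocalSimpleSplitGenerator.initModel() uses further down in this commit. A minimal sketch of that lookup; the properties file path and the sketch class name are illustrative, only the key and class names come from the files above:

import java.io.FileInputStream;
import java.util.Properties;

import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;

public class AlgorithmLookupSketch {
    public static void main(String[] args) throws Exception {
        Properties p = new Properties();
        // illustrative path: the engine actually reads the file configured in AlgorithmConfiguration
        p.load(new FileInputStream("./cfg/algorithms.properties"));
        String className = p.getProperty("FEED_FORWARD_A_N_N_DISTRIBUTION");
        // same pattern as initModel(): Class.forName(objectclass).newInstance()
        SpatialProbabilityDistributionGeneric model =
            (SpatialProbabilityDistributionGeneric) Class.forName(className).newInstance();
        System.out.println("Loaded distribution model: " + className);
    }
}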

View File

@ -138,14 +138,21 @@ public class InterpolateTables {
for (int i = 0; i < producedfiles.length; i++) {
String filename = producedfiles[i].getName();
filename = filename.substring(0, filename.lastIndexOf("."));
filename = filename.substring(0, filename.lastIndexOf(".")).replace(" ", "");
interpolatedTables[i + 1] = filename;
/*create Table from file*/
/* OLD CODE FOR LOCAL DB
String copyFileQuery = DatabaseUtils.copyFileToTableStatement(temporaryDirectory + producedfiles[i].getName(), filename);
// create Table
AnalysisLogger.getLogger().debug("CREATING TABLE->" + filename);
DatabaseFactory.executeSQLUpdate(String.format(createTableStatement, filename), referencedbConnection);
AnalysisLogger.getLogger().debug("FULFILLING TABLE->" + filename + ": " + copyFileQuery);
DatabaseFactory.executeSQLUpdate(copyFileQuery, referencedbConnection);
*/
AnalysisLogger.getLogger().debug("CREATING TABLE->" + filename);
DatabaseFactory.executeSQLUpdate(String.format(createTableStatement, filename), referencedbConnection);
AnalysisLogger.getLogger().debug("COPYING TABLE->" + filename);
DatabaseUtils.createRemoteTableFromFile(producedfiles[i].getAbsolutePath(),filename,";",false,config.getDatabaseUserName(),config.getDatabasePassword(),config.getDatabaseURL());
status = Math.min(status + statusstep, 99);
}
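The commented-out block built a local COPY statement via DatabaseUtils.copyFileToTableStatement; the replacement streams the produced file to the remote PostgreSQL server through the JDBC driver's CopyManager, using the createRemoteTableFromFile helper added to DatabaseUtils later in this commit. A minimal sketch of that mechanism; connection parameters, file name and table name are illustrative:

import java.io.FileInputStream;
import java.sql.Connection;
import java.sql.DriverManager;

import org.postgresql.copy.CopyManager;
import org.postgresql.core.BaseConnection;

public class RemoteCopySketch {
    public static void main(String[] args) throws Exception {
        Class.forName("org.postgresql.Driver");
        // illustrative connection parameters and names
        Connection conn = DriverManager.getConnection(
            "jdbc:postgresql://localhost/testdb", "user", "password");
        FileInputStream fis = new FileInputStream("./interp_table_1.csv");
        // COPY ... FROM STDIN pushes the local file content to the remote server,
        // the same call used by DatabaseUtils.createRemoteTableFromFile
        new CopyManager((BaseConnection) conn).copyIn(
            "COPY interp_table_1 FROM STDIN WITH DELIMITER ';' CSV", fis);
        fis.close();
        conn.close();
    }
}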

View File

@ -9,7 +9,7 @@ import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
public interface SpatialProbabilityDistributionGeneric extends SpatialProbabilityDistribution{
//initialization of the distribution model
public void init(AlgorithmConfiguration config);
public void init(AlgorithmConfiguration config) throws Exception;
public String getMainInfoType();
@ -33,7 +33,7 @@ public interface SpatialProbabilityDistributionGeneric extends SpatialProbabilit
public void postProcess();
//store the result of the probability distribution model: e.g. for the input species -> csquare , probability
public void storeDistribution(Map<Object,Map<Object,Float>> distribution);
public void storeDistribution(Map<Object,Map<Object,Float>> distribution) throws Exception;
//get the internal processing status for the single step calculation
public float getInternalStatus();

View File

@ -0,0 +1,205 @@
package org.gcube.dataanalysis.ecoengine.models;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
public class FeedForwardNN extends ModelAquamapsNN{
@Override
public String getName() {
return "FEED_FORWARD_ANN";
}
@Override
public String getDescription() {
return "A Neural Networks to be trained on features of Real values";
}
@Override
public float getStatus() {
if (status==100)
return status;
else if ((nn!=null) && (nn.status>0))
return nn.status*100f;
else
return status;
}
protected static String TrainingDataSet = "TrainingDataSet";
protected String trainingDataSet;
protected static String TrainingDataSetColumns = "TrainingColumns";
protected String trainingDataSetColumns;
protected static String TrainingDataSetTargetColumn = "TargetColumn";
protected String trainingColumn;
protected String dbcolumns;
protected String[] dbcolumnsList;
protected static String LayersNeurons = "LayersNeurons";
protected static String Reference = "Reference";
protected static String LearningThreshold = "LearningThreshold";
protected static String MaxIterations = "MaxIterations";
protected static String ModelName = "ModelName";
protected static String UserName= "UserName";
protected float learningThr;
protected int maxiter;
@Override
public List<StatisticalType> getInputParameters() {
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
List<TableTemplates> templatesOccurrences = new ArrayList<TableTemplates>();
templatesOccurrences.add(TableTemplates.GENERIC);
InputTable p1 = new InputTable(templatesOccurrences,TrainingDataSet,"a table containing real-valued columns for training the ANN");
ColumnTypesList p2 = new ColumnTypesList(TrainingDataSet, TrainingDataSetColumns, "column names to use as features vectors", false);
ColumnType p3 = new ColumnType(TrainingDataSet, TrainingDataSetTargetColumn, "the column to use as target", "probability", false);
PrimitiveTypesList p4 = new PrimitiveTypesList(Integer.class.getName(), PrimitiveTypes.NUMBER,LayersNeurons,"a list giving the number of neurons in each inner layer",true);
PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, Reference,"the phenomenon this ANN models - can be a generic identifier; put 1 to leave it unspecified","1");
PrimitiveType p6 = new PrimitiveType(Float.class.getName(), null, PrimitiveTypes.NUMBER, LearningThreshold,"the learning threshold for this ANN","0.01");
PrimitiveType p7 = new PrimitiveType(Integer.class.getName(), null, PrimitiveTypes.NUMBER, MaxIterations,"the maximum number of iterations in the training","100");
PrimitiveType p11 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, ModelName,"The name of this Neural Network - insert without spaces","neuralnet_");
ServiceType p10 = new ServiceType(ServiceParameters.USERNAME, UserName,"LDAP username");
parameters.add(p1);
parameters.add(p2);
parameters.add(p3);
parameters.add(p4);
parameters.add(p5);
parameters.add(p6);
parameters.add(p7);
parameters.add(p11);
parameters.add(p10);
DatabaseType.addDefaultDBPars(parameters);
return parameters;
}
@Override
public void init(AlgorithmConfiguration config, Model previousModel) {
AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
// init the database
try {
connection = DatabaseUtils.initDBSession(config);
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().trace("ERROR initializing connection");
}
fileName = config.getPersistencePath()+Neural_Network.generateNNName(config.getParam(Reference), config.getParam(UserName), config.getParam(ModelName));
trainingDataSet = config.getParam(TrainingDataSet);
trainingDataSetColumns = config.getParam(TrainingDataSetColumns);
trainingColumn = config.getParam(TrainingDataSetTargetColumn);
learningThr = Float.parseFloat(config.getParam(LearningThreshold));
maxiter = Integer.parseInt(config.getParam(MaxIterations));
String layersNeurons$ = config.getParam(LayersNeurons);
if ((layersNeurons$!=null)&&(layersNeurons$.length()>0))
{
String [] split = layersNeurons$.split(AlgorithmConfiguration.getListSeparator());
layersNeurons = new int[split.length];
boolean nullhyp=true;
for (int i = 0;i<split.length;i++){
layersNeurons[i] = Integer.parseInt(split[i]);
if ((layersNeurons[i]>0)&&(nullhyp))
nullhyp=false;
}
if (nullhyp)
layersNeurons=null;
}
dbcolumns = "";
dbcolumnsList = trainingDataSetColumns.split(AlgorithmConfiguration.getListSeparator());
for (int i=0;i<dbcolumnsList.length;i++){
dbcolumns+=dbcolumnsList[i];
if (i<dbcolumnsList.length-1)
dbcolumns+=",";
}
}
private String takeElementsQuery = "select %1$s from %2$s d order by %3$s";
protected Neural_Network nn;
protected double maxfactor=1;
protected double minfactor=0;
@Override
public void train(AlgorithmConfiguration Input, Model previousModel) {
try {
// take all features input vectors
String query = String.format(takeElementsQuery, trainingColumn+","+dbcolumns,trainingDataSet,trainingColumn);
AnalysisLogger.getLogger().debug("Query to execute: "+query);
List<Object> features = DatabaseFactory.executeSQLQuery(query, connection);
int numbOfFeatures = features.size();
//get reduction factor for normalizing the outputs
List<Object> maxmin = DatabaseFactory.executeSQLQuery("select max("+trainingColumn+"), min("+trainingColumn+") from "+trainingDataSet, connection);
maxfactor = Double.parseDouble(""+((Object[])maxmin.get(0))[0]);
minfactor = Double.parseDouble(""+((Object[])maxmin.get(0))[1]);
AnalysisLogger.getLogger().debug("Calculated max: "+maxfactor+" min: "+minfactor);
// setup Neural Network
int numberOfInputNodes = dbcolumnsList.length;
int numberOfOutputNodes = 1;
AnalysisLogger.getLogger().debug("Training the ANN with "+numbOfFeatures+" training data and "+numberOfInputNodes+" inputs");
if (layersNeurons!=null){
int[] innerLayers = Neural_Network.setupInnerLayers(layersNeurons);
nn = new Neural_Network(numberOfInputNodes, numberOfOutputNodes, innerLayers, Neural_Network.ACTIVATIONFUNCTION.SIGMOID);
}
else
nn = new Neural_Network(numberOfInputNodes, numberOfOutputNodes, Neural_Network.ACTIVATIONFUNCTION.SIGMOID);
nn.maxfactor=maxfactor;
nn.minfactor=minfactor;
nn.setThreshold(learningThr);
nn.setCycles(maxiter);
AnalysisLogger.getLogger().debug("Features preprocessing");
double[][] in = new double[numbOfFeatures][];
double[][] out = new double[numbOfFeatures][];
// build NN input
for (int i = 0; i < numbOfFeatures; i++) {
// out[i] = new double[0];
Object[] feats = (Object[]) features.get(i);
in[i] = Neural_Network.preprocessObjects(Arrays.copyOfRange((Object[]) features.get(i), 1, feats.length));
out[i] = Neural_Network.preprocessObjects(Arrays.copyOfRange((Object[]) features.get(i), 0, 1));
//apply reduction factor
// AnalysisLogger.getLogger().debug("Output Transformed from "+out[i][0]);
out[i][0] =nn.getCorrectValueForOutput(out[i][0]);
// AnalysisLogger.getLogger().debug("To "+out[i][0]);
}
AnalysisLogger.getLogger().debug("Features were correctly preprocessed - Training");
// train the NN
nn.train(in, out);
AnalysisLogger.getLogger().debug("Saving Network");
save(fileName, nn);
AnalysisLogger.getLogger().debug("Done");
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().error("ERROR during training");
}
status = 100f;
}
}
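The target column is rescaled before training because the sigmoid output of the network lives in [0,1]: getCorrectValueForOutput maps a raw value y to (y - min) / (max - min), and getCorrectValueFromOutput inverts the mapping at prediction time as prob*max + (1-prob)*min. A small sketch of the round trip; the numeric values are illustrative, the formulas are those of the two methods:

public class OutputScalingSketch {
    public static void main(String[] args) {
        // illustrative range taken from the training column (see the max/min query above)
        double maxfactor = 2500.0;
        double minfactor = 10.0;
        double y = 700.0;                                             // a raw target value
        double scaled = (y - minfactor) / (maxfactor - minfactor);    // getCorrectValueForOutput
        double back = scaled * maxfactor + (1 - scaled) * minfactor;  // getCorrectValueFromOutput
        System.out.println(scaled + " -> " + back);                   // back equals y (up to rounding)
    }
}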

View File

@ -54,7 +54,7 @@ public class ModelAquamapsNN implements Model {
InputTable p1 = new InputTable(templatesOccurrences,"AbsenceDataTable","A Table containing absence points");
InputTable p2 = new InputTable(templatesOccurrences,"PresenceDataTable","A Table containing positive occurrences");
PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "SpeciesName","Species Code of the fish the NN will correspond to","Fis-10407");
PrimitiveTypesList p4 = new PrimitiveTypesList(Integer.class.getName(), PrimitiveTypes.NUMBER,"LayersNeurons","a list of neurons number for each inner layer separated by comma",false);
PrimitiveTypesList p4 = new PrimitiveTypesList(Integer.class.getName(), PrimitiveTypes.NUMBER,"LayersNeurons","a list giving the number of neurons in each inner layer",false);
PrimitiveType p11 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, "NeuralNetworkName","The name of this Neural Network - insert without spaces","neuralnet_");
DatabaseType p5 = new DatabaseType(DatabaseParameters.DATABASEUSERNAME, "DatabaseUserName", "db user name");
@ -89,12 +89,12 @@ public class ModelAquamapsNN implements Model {
public void setVersion(float version) {
}
SessionFactory connection;
String fileName;
String presenceTable;
String absenceTable;
float status;
int[] layersNeurons = {100, 2};
protected SessionFactory connection;
protected String fileName;
protected String presenceTable;
protected String absenceTable;
protected float status;
protected int[] layersNeurons = {100, 2};
@Override
public void init(AlgorithmConfiguration Input, Model previousModel) {

View File

@ -1,19 +1,37 @@
package org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class Neural_Network implements Serializable {
Neuron[][] griglia;
static final long serialVersionUID = 1;
// originale = 1.2
double soglia = 0.001;
double maxcycle = 1000;
// double soglia = 0.001;
// double maxcycle = 1000;
double soglia = 0.01;
double maxcycle = 100;
double acceptanceThr = 0.5;
public double maxfactor=1;
public double minfactor=0;
public void setThreshold(double soglia) {
this.soglia = soglia;
}
public double getCorrectValueFromOutput(double prob){
return prob*maxfactor+(1-prob)*minfactor;
}
public double getCorrectValueForOutput(double output){
return (double)(output-minfactor)/(maxfactor-minfactor);
}
public void setAcceptanceThreshold(double treshold) {
this.acceptanceThr = treshold;
}
@ -67,6 +85,19 @@ public class Neural_Network implements Serializable {
return out;
}
public int getNumberOfOutputs(){
if (griglia!=null)
return griglia[griglia.length - 1].length;
else
return 0;
}
public int getNumberOfInputs(){
if (griglia!=null)
return griglia[0].length;
else
return 0;
}
public double[] getPositiveCase() {
double[] out = new double[0];
@ -184,8 +215,8 @@ public class Neural_Network implements Serializable {
if (input.length == griglia[0].length - 1)
return prop(input, 0);
else
System.out.println("Error : number of inputs not valid!");
return null;
}
@ -304,14 +335,15 @@ public class Neural_Network implements Serializable {
}
}
}
public float status=0;
public void train(double[][] inputvet, double[][] correctoutputvet) {
if (griglia[griglia.length - 1].length != correctoutputvet[0].length)
System.out.println("Error : the vector of outputs has not a lenght equal to the output of the network");
AnalysisLogger.getLogger().debug("Error : the vector of outputs has not a lenght equal to the output of the network");
else {
double en = 2;
int counter = 0;
double enprec=2;
while ((en > soglia) && (counter <= maxcycle)) {
en = 0;
for (int i = 0; i < inputvet.length; i++) {
@ -319,15 +351,23 @@ public class Neural_Network implements Serializable {
this.BProp(inputvet[i], correctoutputvet[i]);
en += energy(this.propagate(inputvet[i]), correctoutputvet[i]);
}
System.out.println("Learning Score: " + en);
AnalysisLogger.getLogger().debug("Learning Score: " + en);
counter++;
status = (float)counter/(float)maxcycle;
if (en==enprec)
break;
else
enprec=en;
}
System.out.println("Scarto Finale: " + en);
if (counter >= maxcycle)
System.out.println("training incompleto: non sono riuscito ridurre l'errore sotto la soglia!");
AnalysisLogger.getLogger().debug("training incomplete: didn't manage to reduce the error under the thr!");
else
System.out.println("training completo!");
AnalysisLogger.getLogger().debug("training complete!");
status = 100;
}
}
@ -360,6 +400,30 @@ public class Neural_Network implements Serializable {
return o;
}
public static synchronized Neural_Network loadNN(String nomeFile) {
Neural_Network nn = null;
FileInputStream stream = null;
try {
stream = new FileInputStream(nomeFile);
ObjectInputStream ois = new ObjectInputStream(stream);
nn = (Neural_Network) ois.readObject();
} catch (Exception ex) {
ex.printStackTrace();
AnalysisLogger.getLogger().debug("Error in reading the object from file " + nomeFile + " .");
} finally {
try {
stream.close();
} catch (IOException e) {
}
}
return nn;
}
public static String generateNNName(String referenceEntity,String username,String neuralNetName){
return referenceEntity+"_"+username+"_"+neuralNetName;
}
public static void main(String[] args) {
int[] t = { 2 };
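The new loadNN/generateNNName pair gives the library a simple persistence scheme: a trained network is serialized under a name composed of reference, user and model name, the same convention FeedForwardNN uses when saving and FeedForwardNeuralNetworkDistribution uses when loading. A minimal sketch of the round trip; the persistence path and naming arguments are illustrative, and plain Java serialization is used here since Neural_Network is Serializable (the model class saves through its own helper):

import java.io.FileOutputStream;
import java.io.ObjectOutputStream;

import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;

public class PersistenceSketch {
    public static void main(String[] args) throws Exception {
        // illustrative persistence path and naming arguments
        String file = "./cfg/" + Neural_Network.generateNNName("1", "some.user", "neuralnet_test");
        // a tiny 2-input, 1-output sigmoid network
        Neural_Network nn = new Neural_Network(2, 1, Neural_Network.ACTIVATIONFUNCTION.SIGMOID);
        ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(file));
        oos.writeObject(nn);
        oos.close();
        // reload it through the new helper
        Neural_Network reloaded = Neural_Network.loadNN(file);
        System.out.println("inputs: " + reloaded.getNumberOfInputs()
            + ", outputs: " + reloaded.getNumberOfOutputs());
    }
}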

View File

@ -24,23 +24,23 @@ import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributio
public class LocalSimpleSplitGenerator implements Generator {
private AlgorithmConfiguration config;
private ExecutorService executorService;
private int numberOfThreadsToUse;
private boolean threadActivity[];
private boolean stopInterrupt;
private SpatialProbabilityDistributionGeneric distributionModel;
private int processedSpeciesCounter;
private int spaceVectorsNumber;
private List<Object> environmentVectors;
private long lastTime;
private int lastProcessedRecordsNumber;
private int processedRecordsCounter;
private float status;
private int chunksize;
ConcurrentLinkedQueue<String> probabilityBuffer;
protected AlgorithmConfiguration config;
protected ExecutorService executorService;
protected int numberOfThreadsToUse;
protected boolean threadActivity[];
protected boolean stopInterrupt;
protected SpatialProbabilityDistributionGeneric distributionModel;
protected int processedSpeciesCounter;
protected int spaceVectorsNumber;
protected List<Object> environmentVectors;
protected long lastTime;
protected int lastProcessedRecordsNumber;
protected int processedRecordsCounter;
protected float status;
protected int chunksize;
protected ConcurrentLinkedQueue<String> probabilityBuffer;
//species Objects -> (geographical Object , Probability)
ConcurrentHashMap<Object,Map<Object,Float>> completeDistribution;
protected ConcurrentHashMap<Object,Map<Object,Float>> completeDistribution;
public LocalSimpleSplitGenerator() {
}
@ -105,7 +105,7 @@ public class LocalSimpleSplitGenerator implements Generator {
}
private void initModel() throws Exception {
protected void initModel() throws Exception {
Properties p = AlgorithmConfiguration.getProperties(config.getConfigPath() + AlgorithmConfiguration.algorithmsFile);
String objectclass = p.getProperty(config.getModel());
distributionModel = (SpatialProbabilityDistributionGeneric) Class.forName(objectclass).newInstance();
@ -157,17 +157,20 @@ public class LocalSimpleSplitGenerator implements Generator {
// INITIALIZATION
long tstart = System.currentTimeMillis();
try {
AnalysisLogger.getLogger().trace("generate->Take geographical information reference");
AnalysisLogger.getLogger().trace("generate->Take features reference");
// take the area reference vectors
environmentVectors = distributionModel.getGeographicalInfoObjects();
AnalysisLogger.getLogger().trace("generate->Take species references");
List<Object> speciesVectors = distributionModel.getMainInfoObjects();
AnalysisLogger.getLogger().trace("generate->got all information");
if ((environmentVectors ==null) ||(environmentVectors.size()==0))
throw new Exception("Empty Features Set");
// calculate the number of chunks needed
spaceVectorsNumber = environmentVectors.size();
int speciesVectorNumber = speciesVectors.size();
AnalysisLogger.getLogger().trace("generate->Features to calc: "+spaceVectorsNumber);
AnalysisLogger.getLogger().trace("generate->Take groups references");
List<Object> speciesVectors = distributionModel.getMainInfoObjects();
int speciesVectorNumber = speciesVectors.size();
AnalysisLogger.getLogger().trace("generate->Number of groups of features: "+speciesVectorNumber);
// calculate number of chunks to take into account
chunksize = spaceVectorsNumber / numberOfThreadsToUse;
if (chunksize == 0)
@ -176,7 +179,7 @@ public class LocalSimpleSplitGenerator implements Generator {
if ((spaceVectorsNumber % chunksize) != 0)
numOfChunks += 1;
AnalysisLogger.getLogger().trace("generate->Calculation Started with " + numOfChunks + " chunks and " + speciesVectorNumber + " species - chunk size will be "+chunksize);
AnalysisLogger.getLogger().trace("generate->Calculation Started with " + numOfChunks + " chunks and " + speciesVectorNumber + " groups - chunk size will be "+chunksize);
// initialize threads
initializeThreads();
@ -252,11 +255,19 @@ public class LocalSimpleSplitGenerator implements Generator {
throw e;
} finally {
// REPORT OVERALL ELAPSED TIME
distributionModel.postProcess();
AnalysisLogger.getLogger().trace("generate-> Storing Probability Distribution");
distributionModel.storeDistribution(completeDistribution);
try{
distributionModel.storeDistribution(completeDistribution);
}catch(Exception ee){
AnalysisLogger.getLogger().trace("generate-> Error Storing Probability Distribution ",ee);
}
try{
distributionModel.postProcess();
}catch(Exception eee){}
try{
// shutdown all
shutdown();
}catch(Exception eeee){}
long tend = System.currentTimeMillis();
long ttotal = tend - tstart;
AnalysisLogger.getLogger().warn("generate->Distribution Generator->Algorithm finished in: " + ((double) ttotal / (double) 60000) + " min\n");
@ -299,17 +310,16 @@ public class LocalSimpleSplitGenerator implements Generator {
}
for (int i = spaceindex; i < max; i++) {
float prob = distributionModel.calcProb(speciesVector, environmentVectors.get(i));
Object enfeatures = environmentVectors.get(i);
float prob = distributionModel.calcProb(speciesVector, enfeatures);
// String geographicalID = distributionModel.getGeographicalID(environmentVectors.get(i));
if (prob > 0.1) {
//record the overall probability distribution
geoDistrib.put(environmentVectors.get(i), prob);
}
//record the overall probability distribution
geoDistrib.put(enfeatures, prob);
processedRecordsCounter++;
}
AnalysisLogger.getLogger().trace("FINISHED");
threadActivity[threadIndex] = false;
return 0;
}
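The generator still splits the feature vectors into per-thread chunks before spawning the calculation tasks; the behavioural changes in this file are that every probability is now recorded (the prob > 0.1 filter is gone) and that failures while storing or post-processing no longer prevent the shutdown sequence. A small worked sketch of the chunking arithmetic used above, with illustrative numbers:

public class ChunkingSketch {
    public static void main(String[] args) {
        // illustrative values: 1005 feature vectors, 4 threads
        int spaceVectorsNumber = 1005;
        int numberOfThreadsToUse = 4;
        int chunksize = spaceVectorsNumber / numberOfThreadsToUse;   // 251
        if (chunksize == 0)
            chunksize = 1;
        int numOfChunks = spaceVectorsNumber / chunksize;            // 4
        if ((spaceVectorsNumber % chunksize) != 0)
            numOfChunks += 1;                                        // remainder of 1 -> 5 chunks
        System.out.println("chunk size " + chunksize + ", chunks " + numOfChunks);
    }
}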

View File

@ -148,7 +148,7 @@ public class ProcessorsFactory {
map.put("MODELS", ModelersFactory.getModels(configPath));
map.put("EVALUATORS", EvaluatorsFactory.getAllEvaluators(configPath));
map.put("CLUSTERERS", ClusterersFactory.getAllClusterers(configPath));
map.put("TRANSDUCERERS", TransducerersFactory.getAllTransducerers(configPath));
map.put("TRANSDUCERS", TransducerersFactory.getAllTransducerers(configPath));
return map;
}

View File

@ -0,0 +1,241 @@
package org.gcube.dataanalysis.ecoengine.spatialdistributions;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.hibernate.SessionFactory;
public abstract class BayesianDistribution implements SpatialProbabilityDistributionGeneric {
protected static String FeaturesTableP = "FeaturesTable";
protected static String FeaturesTableColumnsP = "FeaturesColumnNames";
protected static String FinalTableLabel = "FinalTableLabel";
protected static String FinalTableName = "FinalTableName";
protected static String FinalTableValue = "tvalue";
protected static String FinalTableValueType = "real";
protected static String GroupingFactor = "GroupingFactor";
protected static String ModelName = "ModelName";
protected static String UserName = "UserName";
protected float status = 10f;
protected String featuresTable;
protected String featuresTableColumns;
protected String finalTableName;
protected String finalTableLabel;
protected String modelName;
protected String userName;
protected String groupingFactor;
protected SessionFactory dbConnection;
protected AlgorithmConfiguration config;
@Override
public String getMainInfoType() {
return String.class.getName();
}
@Override
public String getGeographicalInfoType() {
return Object[].class.getName();
}
@Override
public List<Object> getMainInfoObjects() {
if (groupingFactor.length()==0){
List<Object> lo = new ArrayList<Object>();
lo.add("1");
return lo;
}
else
return DatabaseFactory.executeSQLQuery("select distinct " + groupingFactor + " from " + featuresTableColumns, dbConnection);
}
@Override
public List<Object> getGeographicalInfoObjects() {
return DatabaseFactory.executeSQLQuery("select distinct " + featuresTableColumns + " from " + featuresTable, dbConnection);
}
@Override
public void storeDistribution(Map<Object, Map<Object, Float>> distribution) throws Exception {
StringBuffer sb = new StringBuffer();
int distribscounter=0;
int distrsize=distribution.size();
for (Object key : distribution.keySet()) {
Map<Object, Float> innerdistrib = distribution.get(key);
int counter=0;
int innerdistrsize = innerdistrib.size();
for (Object vector : innerdistrib.keySet()) {
float value = innerdistrib.get(vector);
Object[] elements = (Object[]) vector;
if (groupingFactor.length()>0)
sb.append("(" + key + ",");
else
sb.append("(");
for (Object elem : elements) {
sb.append(elem + ",");
}
sb.append(value + ")");
if (counter<innerdistrsize-1)
sb.append(",");
counter++;
}
if (distribscounter<distrsize-1)
sb.append(",");
distribscounter++;
}
int len = sb.length()-1;
String insertBuffer = DatabaseUtils.insertFromBuffer(finalTableName, featuresTableColumns + "," + FinalTableValue, sb);
if (groupingFactor.length()>0)
insertBuffer = DatabaseUtils.insertFromBuffer(finalTableName, groupingFactor + "," + featuresTableColumns + "," + FinalTableValue, sb);
// AnalysisLogger.getLogger().debug("Insertion Query " + insertBuffer);
AnalysisLogger.getLogger().debug("Writing Distribution into the DB ");
DatabaseFactory.executeSQLUpdate(insertBuffer, dbConnection);
AnalysisLogger.getLogger().debug("Done!");
}
@Override
public float getInternalStatus() {
return status;
}
@Override
public String getMainInfoID(Object mainInfo) {
if (groupingFactor.length()==0)
return "1";
else
return "" + ((Object[]) mainInfo)[0];
}
@Override
public String getGeographicalID(Object geoInfo) {
return "";
}
@Override
public ALG_PROPS[] getProperties() {
ALG_PROPS[] p = { ALG_PROPS.PHENOMENON_VS_GEOINFO };
return p;
}
@Override
public void postProcess() {
status = 100f;
if (dbConnection!=null){
dbConnection.close();
}
}
@Override
public List<StatisticalType> getInputParameters() {
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
List<TableTemplates> templateOccs = new ArrayList<TableTemplates>();
templateOccs.add(TableTemplates.GENERIC);
InputTable p1 = new InputTable(templateOccs, FeaturesTableP, "a table containing feature vectors", "occurrences");
ColumnTypesList p2 = new ColumnTypesList(FeaturesTableP, FeaturesTableColumnsP, "column names of the features", false);
PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, FinalTableLabel, "table name of the resulting distribution", "Distrib_");
ServiceType p4 = new ServiceType(ServiceParameters.RANDOMSTRING, FinalTableName, "table name of the distribution", "distrib_");
PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, GroupingFactor, "identifier for grouping sets of vectors (leave blank for no grouping)", "speciesid");
PrimitiveType p6 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, ModelName, "the name of a previously trained model", "neuralnet_");
ServiceType p7 = new ServiceType(ServiceParameters.USERNAME, UserName,"LDAP username");
parameters.add(p1);
parameters.add(p2);
parameters.add(p3);
parameters.add(p4);
parameters.add(p5);
parameters.add(p6);
parameters.add(p7);
DatabaseType.addDefaultDBPars(parameters);
return parameters;
}
@Override
public StatisticalType getOutput() {
List<TableTemplates> template = new ArrayList<TableTemplates>();
template.add(TableTemplates.TESTSET);
return new OutputTable(template, finalTableLabel, finalTableName, "Output table");
}
@Override
public void init(AlgorithmConfiguration config) throws Exception {
status = 0;
AnalysisLogger.getLogger().debug("Initializing Database Connection");
this.config = config;
// initialization of the variables
finalTableLabel = config.getParam(FinalTableLabel);
finalTableName = config.getParam(FinalTableName);
featuresTable = config.getParam(FeaturesTableP);
featuresTableColumns = config.getParam(FeaturesTableColumnsP);
groupingFactor = config.getParam(GroupingFactor);
if (groupingFactor==null) groupingFactor="";
modelName = config.getParam(ModelName);
userName = config.getParam(UserName);
// create a new table
dbConnection = DatabaseUtils.initDBSession(config);
try {
AnalysisLogger.getLogger().debug("Dropping table " + finalTableName);
String dropStatement = DatabaseUtils.dropTableStatement(finalTableName);
DatabaseFactory.executeSQLUpdate(dropStatement, dbConnection);
AnalysisLogger.getLogger().debug("Table " + finalTableName + " dropped");
} catch (Exception e) {
AnalysisLogger.getLogger().debug("Could not drop table " + finalTableName);
}
// create Table
String[] features = featuresTableColumns.split(AlgorithmConfiguration.getListSeparator());
String columns = "";
featuresTableColumns="";
for (int i = 0; i < features.length; i++) {
columns += features[i] + " real";
featuresTableColumns+=features[i];
if (i < features.length - 1){
columns += ",";
featuresTableColumns+=",";
}
}
String createStatement = "create table "+finalTableName+" ( "+columns+", "+FinalTableValue+" "+FinalTableValueType+")";
if (groupingFactor.length()>0){
createStatement = "create table "+finalTableName+" ( "+groupingFactor+" character varying "+columns+", "+FinalTableValue+" "+FinalTableValueType+")";
}
AnalysisLogger.getLogger().debug("Creating table: " + finalTableName + " by statement: " + createStatement);
DatabaseFactory.executeSQLUpdate(createStatement, dbConnection);
AnalysisLogger.getLogger().debug("Table: " + finalTableName + " created");
/*
AnalysisLogger.getLogger().debug("Adding a new column to "+finalTableName);
DatabaseFactory.executeSQLUpdate(DatabaseUtils.addColumnStatement(finalTableLabel, FinalTableValue, FinalTableValueType), dbConnection);
*/
status = 10f;
}
}
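For a concrete picture of what init() and storeDistribution() produce: with two feature columns (say depthmean and sstanmean, purely illustrative) and no grouping factor, init() issues a statement of the form "create table distrib_xyz (depthmean real, sstanmean real, tvalue real)", and storeDistribution() collects all rows into one multi-value buffer. A small sketch that rebuilds the same buffer shape; the data, table name and column names are illustrative, and the final INSERT wording is only what insertFromBuffer presumably produces:

public class InsertBufferSketch {
    public static void main(String[] args) {
        // illustrative feature vectors and probabilities (no grouping factor)
        double[][] vectors = { { 120.5, 17.2 }, { 80.0, 21.9 } };
        double[] values = { 0.73, 0.12 };
        StringBuffer sb = new StringBuffer();
        for (int i = 0; i < vectors.length; i++) {
            sb.append("(");
            for (double v : vectors[i])
                sb.append(v + ",");
            sb.append(values[i] + ")");
            if (i < vectors.length - 1)
                sb.append(",");
        }
        // DatabaseUtils.insertFromBuffer presumably wraps this buffer into an INSERT ... VALUES statement
        System.out.println("insert into distrib_xyz (depthmean,sstanmean,tvalue) values " + sb);
    }
}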

View File

@ -0,0 +1,52 @@
package org.gcube.dataanalysis.ecoengine.spatialdistributions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;
public class FeedForwardNeuralNetworkDistribution extends BayesianDistribution{
private Neural_Network neuralnet;
@Override
public float calcProb(Object mainInfo, Object area) {
Object[] vector = (Object[]) area;
double[] features = new double[neuralnet.getNumberOfInputs()-1];
for (int i=0;i<vector.length;i++){
if (vector[i]==null)
vector[i]=0;
if (i<features.length)
features[i]=Double.parseDouble(""+vector[i]);
}
return (float) neuralnet.getCorrectValueFromOutput(neuralnet.propagate(features)[0]);
// return 0;
}
@Override
public void singleStepPreprocess(Object mainInfo, Object area) {
//load a Neural Network for this information
String persistencePath = config.getPersistencePath();
String filename = persistencePath + Neural_Network.generateNNName(""+mainInfo, userName, modelName);
neuralnet = Neural_Network.loadNN(filename);
AnalysisLogger.getLogger().debug("Using neural network with emission range: ("+neuralnet.minfactor+" ; "+neuralnet.maxfactor+"" );
}
@Override
public void singleStepPostprocess(Object mainInfo, Object allAreasInformation) {
}
@Override
public String getName() {
return "FEED_FORWARD_A_N_N_DISTRIBUTION";
}
@Override
public String getDescription() {
return "A Bayesian method using a Feed Forward Neural Network simulating a function from the features space (R^n) to R";
}
}
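Training and prediction are tied together only by the file naming convention: FeedForwardNN saves the network under generateNNName(Reference, UserName, ModelName) in the persistence path, and singleStepPreprocess() rebuilds the same name from the group identifier (mainInfo), user name and model name to load it. A minimal sketch of the prediction step for a network trained on two features; the path, naming arguments and feature values are illustrative:

import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network;

public class PredictionSketch {
    public static void main(String[] args) {
        // illustrative path matching the training-side naming convention
        String file = "./cfg/" + Neural_Network.generateNNName("1", "some.user", "neuralnet_test");
        Neural_Network nn = Neural_Network.loadNN(file);
        // one feature vector (its length is the number of network inputs minus one, as in calcProb)
        double[] features = { 120.5, 17.2 };
        double prob = nn.propagate(features)[0];
        // map the sigmoid output back to the original target range
        double predicted = nn.getCorrectValueFromOutput(prob);
        System.out.println("predicted value: " + predicted);
    }
}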

View File

@ -0,0 +1,104 @@
package org.gcube.dataanalysis.ecoengine.test.checks;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class TestBayesianModels {
/**
* example of processing on a single machine: the procedure trains a feed-forward ANN and generates a new table containing the resulting distribution
*
*/
public static void main1(String[] args) throws Exception {
/*
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigLocal1());
generators.get(0).init();
Regressor.process(generators.get(0));
generators = null;
*/
List<Modeler> generators = ModelersFactory.getModelers(testConfigLocal1());
generators.get(0).init();
Regressor.process(generators.get(0));
generators = null;
}
public static void main(String[] args) throws Exception {
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigLocal2());
generators.get(0).init();
Regressor.process(generators.get(0));
generators = null;
}
private static AlgorithmConfiguration testConfigLocal2() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setPersistencePath("./cfg/");
config.setNumberOfResources(1);
config.setModel("FEED_FORWARD_A_N_N_DISTRIBUTION");
config.setParam("FeaturesTable","hcaf_d");
config.setParam("FeaturesColumnNames","depthmin"+AlgorithmConfiguration.getListSeparator()+
"depthmean"+AlgorithmConfiguration.getListSeparator()+
"depthsd"+AlgorithmConfiguration.getListSeparator()+
"sstanmean"+AlgorithmConfiguration.getListSeparator()+
"sstansd"+AlgorithmConfiguration.getListSeparator()+
"sstmnmax"+AlgorithmConfiguration.getListSeparator()+
"sstmnmin"+AlgorithmConfiguration.getListSeparator()+
"sstmnrange"+AlgorithmConfiguration.getListSeparator()+
"sbtanmean"+AlgorithmConfiguration.getListSeparator());
config.setParam("FinalTableLabel","bayesian_1");
config.setParam("FinalTableName", "bayesian_1");
config.setParam("GroupingFactor","");
config.setParam("ModelName","neuralnetwork");
config.setParam("UserName","gianpaolo.coro");
return config;
}
private static AlgorithmConfiguration testConfigLocal1() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setPersistencePath("./cfg/");
config.setNumberOfResources(1);
config.setModel("FEED_FORWARD_ANN");
config.setParam("TrainingDataSet","hcaf_d");
config.setParam("TrainingColumns","depthmin"+AlgorithmConfiguration.getListSeparator()+
"depthmean"+AlgorithmConfiguration.getListSeparator()+
"depthsd"+AlgorithmConfiguration.getListSeparator()+
"sstanmean"+AlgorithmConfiguration.getListSeparator()+
"sstansd"+AlgorithmConfiguration.getListSeparator()+
"sstmnmax"+AlgorithmConfiguration.getListSeparator()+
"sstmnmin"+AlgorithmConfiguration.getListSeparator()+
"sstmnrange"+AlgorithmConfiguration.getListSeparator()+
"sbtanmean"+AlgorithmConfiguration.getListSeparator());
config.setParam("TargetColumn","primprodmean");
config.setParam("LayersNeurons", "100"+AlgorithmConfiguration.getListSeparator()+"2");
config.setParam("NeuralNetworkName","neuralnetwork");
config.setParam("UserName","gianpaolo.coro");
config.setParam("Reference","1");
config.setParam("ModelName","neuralnetwork");
return config;
}
}

View File

@ -6,6 +6,7 @@ import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis;
import org.gcube.dataanalysis.ecoengine.evaluation.bioclimate.InterpolateTables.INTERPOLATIONFUNCTIONS;
@ -20,7 +21,7 @@ public class TestDBNEXTEvaluators {
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Evaluator> trans = null;
/*
trans = TransducerersFactory.getTransducerers(testConfigLocal());
trans.get(0).init();
@ -54,14 +55,18 @@ public static void main(String[] args) throws Exception {
Regressor.process(trans.get(0));
trans = null;
*/
trans = EvaluatorsFactory.getEvaluators(testConfigLocal12());
// List<Evaluator> trans = null;
// trans = EvaluatorsFactory.getEvaluators(testConfigLocal12());
List<Transducerer> trans = TransducerersFactory.getTransducerers(testConfigLocal4());
trans.get(0).init();
Regressor.process(trans.get(0));
PrimitiveType output = (PrimitiveType) trans.get(0).getOutput();
HashMap<String, String> out = (HashMap<String, String>)output.getContent();
DiscrepancyAnalysis.visualizeResults(out);
// PrimitiveType output = (PrimitiveType) trans.get(0).getOutput();
// HashMap<String, String> out = (HashMap<String, String>)output.getContent();
// DiscrepancyAnalysis.visualizeResults(out);
StatisticalType output = trans.get(0).getOutput();
trans = null;
@ -112,6 +117,11 @@ public static void main(String[] args) throws Exception {
config.setParam("NumberOfInterpolations","2");
config.setParam("InterpolationFunction",INTERPOLATIONFUNCTIONS.LINEAR.name());
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
config.setParam("DatabaseURL","jdbc:postgresql://dbtest.next.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
return config;
}

View File

@ -17,10 +17,12 @@ public class Regressor {
String resLoad = agent.getResourceLoad();
String ress = agent.getResources();
/*
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("STATUS: " + agent.getStatus());
Thread.sleep(1000);
*/
Thread.sleep(10000);
}
} else
System.out.println("Generator Algorithm Not Supported" );

View File

@ -33,6 +33,7 @@ public class OccurrencePointsInSeaOnEarth extends OccurrencePointsMerger{
List<TableTemplates> templatesOccurrence = new ArrayList<TableTemplates>();
templatesOccurrence.add(TableTemplates.OCCURRENCE_SPECIES);
// occurrence points tables
PrimitiveType p0 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, finalTableNameL,"the name of the produced table", "Occ_");
InputTable p1 = new InputTable(templatesOccurrence, tableNameF, "The table containing the occurrence points", "");
// string parameters
ColumnType p3 = new ColumnType(tableNameF, longitudeColumn, "column with longitude values", "decimallongitude", false);
@ -41,6 +42,7 @@ public class OccurrencePointsInSeaOnEarth extends OccurrencePointsMerger{
PrimitiveType p10 = new PrimitiveType(Enum.class.getName(), inseasonearth.values(), PrimitiveTypes.ENUMERATED, filterTypeF, "The filter type",""+inseasonearth.MARINE);
List<StatisticalType> inputs = new ArrayList<StatisticalType>();
inputs.add(p0);
inputs.add(p1);
inputs.add(p3);
inputs.add(p4);
@ -60,6 +62,7 @@ public class OccurrencePointsInSeaOnEarth extends OccurrencePointsMerger{
latFld = config.getParam(latitudeColumn);
tableName = config.getParam(tableNameF);
finalTableName = config.getParam(finalTableNameF);
finalTableLabel= config.getParam(finalTableNameL);
filter = inseasonearth.valueOf(config.getParam(filterTypeF));
status = 0;
}

View File

@ -204,7 +204,7 @@ public class OccurrencePointsMerger implements Transducerer {
List<TableTemplates> templatesOccurrence = new ArrayList<TableTemplates>();
templatesOccurrence.add(TableTemplates.OCCURRENCE_SPECIES);
// occurrence points tables
PrimitiveType p0 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, finalTableNameL,"the name of the produced table", "MergedOcc_");
PrimitiveType p0 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, finalTableNameL,"the name of the produced table", "Occ_");
InputTable p1 = new InputTable(templatesOccurrence, leftTableNameF, "the First table containing the occurrence points", "");
InputTable p2 = new InputTable(templatesOccurrence, rightTableNameF, "the Second table containing the occurrence points", "");

View File

@ -186,6 +186,14 @@ public class DatabaseFactory{
}
public static Connection getDBConnection(String drivername,String username, String password, String databaseurl) throws Exception{
// Load the database driver
Class.forName(drivername) ;
// Get a connection to the database
Connection conn = DriverManager.getConnection(databaseurl,username,password) ;
return conn;
}
public static void executeUpdateNoTransaction(final String query, String drivername,String username, String password, String databaseurl, boolean useSQL) throws Exception{
// Load the database driver

View File

@ -1,9 +1,13 @@
package org.gcube.dataanalysis.ecoengine.utils;
import java.io.FileInputStream;
import java.sql.Connection;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.hibernate.SessionFactory;
import org.postgresql.copy.CopyManager;
import org.postgresql.core.BaseConnection;
public class DatabaseUtils {
@ -222,6 +226,16 @@ public class DatabaseUtils {
}
}
public static void createRemoteTableFromFile(String filePath,String tablename,String delimiter,boolean hasHeader,String username, String password, String databaseurl) throws Exception{
Connection conn = DatabaseFactory.getDBConnection("org.postgresql.Driver",username,password,databaseurl);
CopyManager copyManager = new CopyManager((BaseConnection) conn);
FileInputStream fis = new FileInputStream(filePath);
copyManager.copyIn(String.format("COPY %s FROM STDIN WITH DELIMITER '%s' %s", tablename,delimiter,(hasHeader)?"CSV HEADER":"CSV"), fis);
conn.close();
fis.close();
}
public static SessionFactory initDBSession(AlgorithmConfiguration config) {
SessionFactory dbHibConnection = null;
try {

View File

@ -0,0 +1 @@
Used as a library in the gCube Framework

View File

@ -0,0 +1 @@
Used as a library in the gCube Framework

View File

@ -0,0 +1,8 @@
gCube System - License
------------------------------------------------------------
The gCube/gCore software is licensed as Free Open Source software conveying to
the EUPL (http://ec.europa.eu/idabc/eupl).
The software and documentation is provided by its authors/distributors "as is"
and no expressed or implied warranty is given for its use, quality or fitness
for a particular case.

View File

@ -0,0 +1,8 @@
gCube System - License
------------------------------------------------------------
The gCube/gCore software is licensed as Free Open Source software conveying to
the EUPL (http://ec.europa.eu/idabc/eupl).
The software and documentation is provided by its authors/distributors "as is"
and no expressed or implied warranty is given for its use, quality or fitness
for a particular case.

View File

@ -0,0 +1,2 @@
Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"

View File

@ -0,0 +1,2 @@
Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"

View File

@ -0,0 +1,42 @@
The gCube System - Ecological Engine Library
------------------------------------------------------------
This work is partially funded by the European Commission in the
context of the D4Science project (www.d4science.eu), under the
1st call of FP7 IST priority.
Authors
-------
* Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"
Version and Release Date
------------------------
version 1.2.0 (03-05-2012)
Description
--------------------
Support library for statistical analysis on time series data.
Download information
--------------------
Source code is available from SVN:
http://svn.research-infrastructures.eu/d4science/gcube/trunk/content-management/EcologicalModelling
Binaries can be downloaded from:
http://software.d4science.research-infrastructures.eu/
Documentation
-------------
Ecological Engine documentation is available on-line from the Projects Documentation Wiki:
https://gcube.wiki.gcube-system.org/gcube/index.php/Ecological_Modeling
Licensing
---------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

View File

@ -0,0 +1,42 @@
The gCube System - Ecological Engine Library
------------------------------------------------------------
This work is partially funded by the European Commission in the
context of the D4Science project (www.d4science.eu), under the
1st call of FP7 IST priority.
Authors
-------
* Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"
Version and Release Date
------------------------
version 1.2.0 (03-05-2012)
Description
--------------------
Support library for statistical analysis on time series data.
Download information
--------------------
Source code is available from SVN:
http://svn.research-infrastructures.eu/d4science/gcube/trunk/content-management/EcologicalModelling
Binaries can be downloaded from:
http://software.d4science.research-infrastructures.eu/
Documentation
-------------
Ecological Engine documentation is available on-line from the Projects Documentation Wiki:
https://gcube.wiki.gcube-system.org/gcube/index.php/Ecological_Modeling
Licensing
---------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

View File

@ -0,0 +1,7 @@
<ReleaseNotes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="xsd/changelog.xsd">
<Changeset component="org.gcube.content-management.ecological-engine.1-0-0"
date="2012-02-23">
<Change>First Release</Change>
</Changeset>
</ReleaseNotes>

View File

@ -0,0 +1,7 @@
<ReleaseNotes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="xsd/changelog.xsd">
<Changeset component="org.gcube.content-management.ecological-engine.1-0-0"
date="2012-02-23">
<Change>First Release</Change>
</Changeset>
</ReleaseNotes>

View File

@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ID></ID>
<Type>Library</Type>
<Profile>
<Description>Ecological Engine Library</Description>
<Class>EcologicalEngine</Class>
<Name>ecological-engine</Name>
<Version>1.2.0</Version>
<Packages>
<Software>
<Name>ecological-engine</Name>
<Version>1.3.0-SNAPSHOT</Version>
<MavenCoordinates>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>ecological-engine</artifactId>
<version>1.3.0-SNAPSHOT</version>
</MavenCoordinates>
<Files>
<File>ecological-engine-1.3.0-SNAPSHOT.jar</File>
</Files>
</Software>
</Packages>
</Profile>
</Resource>

View File

@ -0,0 +1 @@
https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine