Gianpaolo Coro 2012-12-20 14:50:26 +00:00
parent 4c76c222b3
commit 2e08d0ac6c
37 changed files with 220573 additions and 49 deletions

Analysis.log.1 (new file, 220095 lines added)

File diff suppressed because one or more lines are too long

View File

@@ -4,6 +4,7 @@ import java.io.File;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.DataTypeRecognizer;
@@ -94,9 +95,18 @@ public class InterpolateTables {
String gotColumn = utils.getColumnName(j);
String gotColumnType = utils.getColumnType(j);
String javatype = DataTypeRecognizer.transformTypeFromDB(gotColumnType);
List<Object> takeFirstColumn = DatabaseFactory.executeSQLQuery(DatabaseUtils.getOrderedElements(table1, utils.getPrimaryKey(), gotColumn), referencedbConnection);
List<Object> takeSecondColumn = DatabaseFactory.executeSQLQuery(DatabaseUtils.getOrderedElements(table2, utils.getPrimaryKey(), gotColumn), referencedbConnection);
String takeF = DatabaseUtils.getOrderedElements(table1, utils.getPrimaryKey(), gotColumn);
String takeS = DatabaseUtils.getOrderedElements(table2, utils.getPrimaryKey(), gotColumn);
AnalysisLogger.getLogger().debug("Taking First column->" + takeF);
AnalysisLogger.getLogger().debug("Taking Second column->" + takeS);
List<Object> takeFirstColumn = DatabaseFactory.executeSQLQuery(takeF, referencedbConnection);
List<Object> takeSecondColumn = DatabaseFactory.executeSQLQuery(takeS, referencedbConnection);
AnalysisLogger.getLogger().debug("First column elements size->" + takeFirstColumn.size());
AnalysisLogger.getLogger().debug("Second column elements size->" + takeSecondColumn.size());
// only if data are of numeric type, perform calculation
if (javatype.equals(BigDecimal.class.getName())) {
AnalysisLogger.getLogger().debug("interpolating -> " + gotColumn);
@@ -218,7 +228,7 @@ public class InterpolateTables {
if (yearCals == endYear)
yearCals = endYear - 1;
String filename = temporaryDirectory + initialFile + "_" + (yearCals) + "_" + function.name() + "_" + i + System.currentTimeMillis() + ".csv";
String filename = temporaryDirectory + "interp_" + (yearCals) + "_" + function.name() + "_" + i + System.currentTimeMillis() + ".csv";
FileTools.saveString(filename, completeFile.toString(), true, "UTF-8");
producedfiles[i] = new File(filename);
System.out.println("PRODUCED FILE TO COPY "+producedfiles[i]);
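The InterpolateTables hunks above split query construction from execution so the generated SQL can be logged before it runs. A minimal sketch of that log-then-execute pattern in plain JDBC (DatabaseFactory/DatabaseUtils are project classes not shown here, so the connection handling and names below are illustrative placeholders):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;

public class LogThenExecute {
    private static final Logger LOG = Logger.getLogger("Analysis");

    // Build the query first, log it, then run it: a failing statement can be
    // reproduced by pasting the logged SQL into a database console.
    public static List<Object> orderedColumn(Connection conn, String table,
                                             String key, String column) throws Exception {
        String query = "select " + column + " from " + table + " order by " + key;
        LOG.info("Taking column->" + query);
        List<Object> values = new ArrayList<Object>();
        try (Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery(query)) {
            while (rs.next())
                values.add(rs.getObject(1));
        }
        LOG.info("Column elements size->" + values.size());
        return values;
    }
}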

View File

@@ -1,5 +1,6 @@
package org.gcube.dataanalysis.ecoengine.spatialdistributions;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -41,6 +42,7 @@ public abstract class BayesianDistribution implements SpatialProbabilityDistribu
protected String finalTableName;
protected String finalTableLabel;
protected String modelName;
protected File modelFile;
protected String userName;
protected String groupingFactor;
protected SessionFactory dbConnection;
@@ -159,7 +161,8 @@ public abstract class BayesianDistribution implements SpatialProbabilityDistribu
PrimitiveType p3 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, FinalTableLabel, "table name of the resulting distribution", "Distrib_");
ServiceType p4 = new ServiceType(ServiceParameters.RANDOMSTRING, FinalTableName, "table name of the distribution", "distrib_");
PrimitiveType p5 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, GroupingFactor, "identifier for grouping sets of vectors (blank for automatic enum)", "speciesid");
PrimitiveType p6 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, ModelName, "the name of a previously trained model", "neuralnet_");
// PrimitiveType p6 = new PrimitiveType(String.class.getName(), null, PrimitiveTypes.STRING, ModelName, "the name of a previously trained model", "neuralnet_");
PrimitiveType p6 = new PrimitiveType(File.class.getName(), null, PrimitiveTypes.FILE, ModelName, "neuralnet_");
ServiceType p7 = new ServiceType(ServiceParameters.USERNAME, UserName,"LDAP username");
parameters.add(p1);
@@ -194,7 +197,10 @@ public abstract class BayesianDistribution implements SpatialProbabilityDistribu
groupingFactor = config.getParam(GroupingFactor);
if (groupingFactor==null) groupingFactor="";
modelName = config.getParam(ModelName);
modelFile = new File(config.getParam(ModelName));
modelName = modelFile.getName();
userName = config.getParam(UserName);
// create a new table
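BayesianDistribution now declares the trained model as a FILE-typed parameter instead of a string name, and derives the display name from the uploaded file itself. A tiny sketch of that derivation (the path is hypothetical):

import java.io.File;

public class ModelParam {
    public static void main(String[] args) {
        // hypothetical upload location; the service supplies the real path
        File modelFile = new File("/tmp/uploads/neuralnet_species1");
        String modelName = modelFile.getName();
        System.out.println(modelName);                   // neuralnet_species1
        System.out.println(modelFile.getAbsolutePath()); // full path used for loading
    }
}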

View File

@@ -29,7 +29,8 @@ public class FeedForwardNeuralNetworkDistribution extends BayesianDistribution{
public void singleStepPreprocess(Object mainInfo, Object area) {
//load a Neural Network for this information
String persistencePath = config.getPersistencePath();
String filename = persistencePath + Neural_Network.generateNNName(""+mainInfo, userName, modelName);
// String filename = persistencePath + Neural_Network.generateNNName(""+mainInfo, userName, modelName);
String filename = modelFile.getAbsolutePath();
neuralnet = Neural_Network.loadNN(filename);
AnalysisLogger.getLogger().debug("Using neural network with emission range: ("+neuralnet.minfactor+" ; "+neuralnet.maxfactor+"" );
}
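With the model delivered as a file, the network is loaded from its absolute path rather than from a reconstructed name. Neural_Network.loadNN is not part of this diff, so the sketch below stands in with plain Java serialization under that assumption:

import java.io.File;
import java.io.FileInputStream;
import java.io.ObjectInputStream;

public class ModelLoader {
    // Loads whatever serialized object the user uploaded; no naming
    // convention is reconstructed, the absolute path is trusted as-is.
    public static Object load(File modelFile) throws Exception {
        try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(modelFile))) {
            return in.readObject();
        }
    }
}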

View File

@@ -58,7 +58,7 @@ public static void main(String[] args) throws Exception {
*/
// List<Evaluator> trans = null;
// trans = EvaluatorsFactory.getEvaluators(testConfigLocal12());
List<ComputationalAgent> trans = TransducerersFactory.getTransducerers(testConfigLocal5c());
List<ComputationalAgent> trans = TransducerersFactory.getTransducerers(testConfigLocal5());
trans.get(0).init();
Regressor.process(trans.get(0));
@@ -137,21 +137,25 @@ public static void main(String[] args) throws Exception {
config.setParam("scientificNameColumn", "scientificname");
config.setParam("eventDateColumn", "eventdate");
config.setParam("lastModificationColumn", "modified");
config.setParam("leftTableName", "speciesset1");
config.setParam("rightTableName", "speciesset2");
// config.setParam("leftTableName", "speciesset1");
// config.setParam("rightTableName", "speciesset2");
config.setParam("leftTableName", "occurrence_species_idb1a80ed3_0b07_4481_b8f0_95b821b2d4c9");
config.setParam("rightTableName", "occurrence_species_idb1a80ed3_0b07_4481_b8f0_95b821b2d4c9");
// config.setParam("leftTableName", "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
// config.setParam("rightTableName", "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("leftTableName", "occurrencesdeleted");
config.setParam("rightTableName", "occurrencesdeleted");
config.setParam("finalTableName", "occurrencesmerged");
config.setParam("spatialTolerance", "10.0");
config.setParam("confidence", "0");
config.setParam("spatialTolerance", "0");
config.setParam("confidence", "100");
/*
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
config.setParam("DatabaseURL","jdbc:postgresql://dbtest.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
*/
return config;
}
@@ -180,14 +184,18 @@ public static void main(String[] args) throws Exception {
// config.setParam("leftTableName", "processedoccurrences_id_e7b77fc2_f1cf_4a46_b7b7_898b663b65dd");
// config.setParam("rightTableName", "processedoccurrences_id_bd3fdae3_a64e_4215_8eb3_c1bd95981dd2");
config.setParam("leftTableName", "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("rightTableName", "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("leftTableName", "occurrence_species_idb1a80ed3_0b07_4481_b8f0_95b821b2d4c9");
config.setParam("rightTableName", "occurrence_species_idb1a80ed3_0b07_4481_b8f0_95b821b2d4c9");
config.setParam("leftTableName", "occurrencesdeleted");
config.setParam("rightTableName", "occurrencesdeleted");
// config.setParam("leftTableName", "speciesset1");
// config.setParam("rightTableName", "speciesset2");
config.setParam("finalTableName", "occurrencessubtractedarticle3");
config.setParam("spatialTolerance", "10.0");
config.setParam("confidence", "80");
config.setParam("spatialTolerance", "0");
config.setParam("confidence", "100");
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
@@ -217,14 +225,21 @@ public static void main(String[] args) throws Exception {
config.setParam("rightTableName", "occurrence_species_id1e8f7b48_b99a_48a3_8b52_89976fd79cd4");
config.setParam("leftTableName", "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
*/
config.setParam("leftTableName", "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
config.setParam("rightTableName", "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
// config.setParam("leftTableName", "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
// config.setParam("rightTableName", "occurrence_species_id0045886b_2a7c_4ede_afc4_3157c694b893");
// config.setParam("leftTableName", "occurrence_species_idb1a80ed3_0b07_4481_b8f0_95b821b2d4c9");
// config.setParam("rightTableName", "occurrence_species_idb1a80ed3_0b07_4481_b8f0_95b821b2d4c9");
config.setParam("leftTableName", "occurrencesdeleted");
config.setParam("rightTableName", "occurrencesdeleted");
// config.setParam("leftTableName", "speciesset1");
// config.setParam("rightTableName", "speciesset2");
config.setParam("finalTableName", "occurrencesintersected");
config.setParam("spatialTolerance", "10.0");
config.setParam("confidence", "0");
config.setParam("spatialTolerance", "0");
config.setParam("confidence", "100");
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
@@ -234,6 +249,33 @@ public static void main(String[] args) throws Exception {
return config;
}
private static AlgorithmConfiguration testConfigLocal5d() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setAgent("OCCURRENCES_DUPLICATES_DELETER");
config.setParam("longitudeColumn", "decimallongitude");
config.setParam("latitudeColumn", "decimallatitude");
config.setParam("recordedByColumn", "recordedby");
config.setParam("scientificNameColumn", "scientificname");
config.setParam("eventDateColumn", "eventdate");
config.setParam("lastModificationColumn", "modified");
config.setParam("OccurrencePointsTableName", "occurrence_species_idb1a80ed3_0b07_4481_b8f0_95b821b2d4c9");
config.setParam("finalTableName", "occurrencesdeleted");
config.setParam("spatialTolerance", "0");
config.setParam("confidence", "100");
config.setParam("DatabaseUserName","utente");
config.setParam("DatabasePassword","d4science");
config.setParam("DatabaseURL","jdbc:postgresql://dbtest.research-infrastructures.eu/testdb");
config.setParam("DatabaseDriver","org.postgresql.Driver");
return config;
}
private static AlgorithmConfiguration testConfigLocal6() {
AlgorithmConfiguration config = Regressor.getConfig();

View File

@@ -2,6 +2,7 @@ package org.gcube.dataanalysis.ecoengine.transducers;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
@@ -89,7 +90,7 @@ public class InterpolationTransducer implements Transducerer{
@Override
public StatisticalType getOutput() {
HashMap<String, StatisticalType> map = new HashMap<String, StatisticalType>();
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
List<TableTemplates> template = new ArrayList<TableTemplates>();
@@ -100,13 +101,13 @@ public class InterpolationTransducer implements Transducerer{
int i=1;
for (String table:producedtables){
// p.add(new OutputTable(template,table,table,"Interpolation number "+i));
map.put("Interpolation "+i, new OutputTable(template,table,table,"Interpolation number "+i));
map.put("Interpolation "+i+" ("+table+")", new OutputTable(template,"Interpolation number "+i,table,"Interpolation number "+i));
i++;
}
}
PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "INTEPOLATED_HCAF_TABLE_LIST", "List of HCAF tables produced by the interpolation");
PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(), map, PrimitiveTypes.MAP, "INTEPOLATED_HCAF_TABLE_LIST", "List of HCAF tables produced by the interpolation");
return output;
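The switch from HashMap to LinkedHashMap matters because the map keys are user-facing labels: LinkedHashMap preserves insertion order, so the interpolation tables are listed in the order they were produced (and the key now embeds the table name, keeping labels unique). A small demonstration:

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class OrderDemo {
    public static void main(String[] args) {
        Map<String, String> hashed = new HashMap<String, String>();
        Map<String, String> linked = new LinkedHashMap<String, String>();
        for (int i = 1; i <= 5; i++) {
            hashed.put("Interpolation " + i, "table" + i);
            linked.put("Interpolation " + i, "table" + i);
        }
        System.out.println(hashed.keySet()); // order depends on hash buckets
        System.out.println(linked.keySet()); // always [Interpolation 1, ..., Interpolation 5]
    }
}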

View File

@@ -13,7 +13,7 @@ public class OccurrencePointsIntersector extends OccurrencePointsMerger{
@Override
public String getDescription() {
return "Between two Ocurrence Sets, keeps the elements of the Right Set that are not in the Left Set.";
return "Between two Ocurrence Sets, keeps the elements of the Right Set that are similar to elements in the Left Set.";
}
@Override

View File

@@ -111,7 +111,9 @@ public class OccurrencePointsMerger implements Transducerer {
for (Object name : columnsNames) {
String name$ = "" + name;
String value$ = "" + row[index];
String value$ = null;
if (row[index]!=null)
value$ = "" + row[index];
if (name$.equalsIgnoreCase(lonFld)) {
record.x = Double.parseDouble(value$);
record.x$ = value$;
@@ -146,7 +148,20 @@ public class OccurrencePointsMerger implements Transducerer {
return record;
}
protected String takeEssential(OccurrenceRecord record) {
OccurrenceRecord record2 = new OccurrenceRecord();
record2.scientificName=record.scientificName;
record2.recordedby=record.recordedby;
record2.eventdate=record.eventdate;
record2.modifdate=record.modifdate;
record2.x=record.x;
record2.y=record.y;
record2.x$=record.x$;
record2.y$=record.y$;
return occurrenceRecord2String(record2);
}
public String occurrenceRecord2String(OccurrenceRecord record) {
StringBuffer buffer = new StringBuffer();
int index = 0;
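takeEssential reduces a record to its identifying fields so the serialized string can act as a duplicate key during insertion. The commit tracks seen keys in an ArrayList; a HashSet would give the same behavior with constant-time lookups, as in this sketch (key format illustrative):

import java.util.HashSet;
import java.util.Set;

public class DedupDemo {
    public static void main(String[] args) {
        Set<String> seen = new HashSet<String>();
        String[] essentials = { "sp1|10.0|43.5", "sp2|11.2|44.0", "sp1|10.0|43.5" };
        for (String key : essentials) {
            if (seen.add(key))                    // add() returns false on duplicates
                System.out.println("insert " + key);
            else
                System.out.println("skip duplicate " + key);
        }
    }
}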
@@ -185,8 +200,11 @@ public class OccurrencePointsMerger implements Transducerer {
// value$="'"+record.modifdate.getTimeInMillis()+"'";
}
} else {
if (record.otherValues != null) {
value$ = "'" + record.otherValues.get(k).replace("'", "") + "'";
if ((record.otherValues != null)&&(record.otherValues.size()>0)) {
String v = record.otherValues.get(k);
if ((v!=null)&&(v.length()>0))
value$ = "'" + v.replace("'", "") + "'";
k++;
}
}
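The added guards keep null or empty column values from being quoted into the VALUES list as literal text. The same idea as a standalone helper (the method name is illustrative):

public class SqlQuote {
    // Returns a single-quoted SQL literal, or NULL when there is nothing to quote.
    static String quoteOrNull(String v) {
        if (v == null || v.length() == 0)
            return "NULL";
        return "'" + v.replace("'", "") + "'"; // same quote-stripping as the commit
    }

    public static void main(String[] args) {
        System.out.println(quoteOrNull("O'Brien")); // 'OBrien'
        System.out.println(quoteOrNull(null));      // NULL
    }
}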
@@ -311,7 +329,7 @@ public class OccurrencePointsMerger implements Transducerer {
@Override
public String getDescription() {
return "Between two Ocurrence Sets, enrichs the Left Set with the elements of the Right Set that are not in the Left Set. Updates the elements of the Left Set with more recent elements in the Right Set.";
return "Between two Ocurrence Sets, enrichs the Left Set with the elements of the Right Set that are not in the Left Set. Updates the elements of the Left Set with more recent elements in the Right Set. If one element in the Left Set corresponds to several recent elements in the Right Set, these will be all substituted to the element of the Left Set.";
}
protected float probabilityStrings(String first, String second) {
@@ -371,10 +389,33 @@ public class OccurrencePointsMerger implements Transducerer {
AnalysisLogger.getLogger().info("Deleting " + todel + " objects");
if (todel > 0) {
for (OccurrenceRecord record : objectstodelete) {
String rec = recordedByFld + "='" + record.recordedby.replace("'", "") + "'";
String sci = scientificNameFld + "='" + record.scientificName.replace("'", "") + "'";
String x = lonFld + "='" + record.x$ + "'";
String y = latFld + "='" + record.y$ + "'";
buffer.append("(");
String rec = null;
if ((record.recordedby !=null ) &&(record.recordedby.length()>0)){
rec = recordedByFld + "='" + record.recordedby.replace("'", "") + "'";
buffer.append(rec);
}
String sci = null;
if ((record.scientificName !=null ) &&(record.scientificName.length()>0)){
if (rec!=null)
buffer.append(" AND ");
sci = scientificNameFld + "='" + record.scientificName.replace("'", "") + "'";
buffer.append(sci);
}
if ((rec!=null) || (sci!=null))
buffer.append(" AND ");
String x = null;
if ((record.x$ != null ) && (record.x$.length()>0))
x = lonFld + "='" + record.x$ + "'";
String y = null;
if ((record.y$ != null ) && (record.y$.length()>0))
y = latFld + "='" + record.y$ + "'";
if ((x!=null) && (y!=null))
buffer.append(x + " AND " + y);
String event = null;
String modified = null;
if (record.eventdate != null)
@@ -384,14 +425,23 @@ public class OccurrencePointsMerger implements Transducerer {
if (record.modifdate != null)
modified = modifDatFld + "=to_timestamp('" + convert2conventionalFormat(record.modifdate) + "','" + sqlDateFormat + "')";
buffer.append("(");
buffer.append(rec + " AND " + sci + " AND " + x + " AND " + y);
// buffer.append(rec + " AND " + sci + " AND " + x + " AND " + y);
if (event != null)
buffer.append(" AND " + event);
if (modified != null)
buffer.append(" AND " + modified);
buffer.append(")");
if ((counter>0)&&(counter%500==0)){
String updateQ = DatabaseUtils.deleteFromBuffer(finalTableName, buffer);
// AnalysisLogger.getLogger().debug("Update:\n"+updateQ);
DatabaseFactory.executeSQLUpdate(updateQ, dbconnection);
AnalysisLogger.getLogger().info("Partial Objects deleted");
buffer = new StringBuffer();
}
else
if (counter < todel - 1)
buffer.append(" OR ");
@@ -399,40 +449,61 @@ public class OccurrencePointsMerger implements Transducerer {
}
String updateQ = DatabaseUtils.deleteFromBuffer(finalTableName, buffer);
// AnalysisLogger.getLogger().debug("Update:\n"+updateQ);
// AnalysisLogger.getLogger().debug("Update:\n"+updateQ);
DatabaseFactory.executeSQLUpdate(updateQ, dbconnection);
AnalysisLogger.getLogger().info("Objects deleted");
AnalysisLogger.getLogger().info("All Objects deleted");
}
buffer = new StringBuffer();
ArrayList<String> insertedStrings = new ArrayList<String>();
int toins = objectstoinsert.size();
AnalysisLogger.getLogger().info("Inserting " + toins + " objects");
counter = 0;
if (toins > 0) {
for (OccurrenceRecord record : objectstoinsert) {
String toInsert=occurrenceRecord2String(record);
String toInsertEssentials=takeEssential(record);
if (!insertedStrings.contains(toInsertEssentials)){
buffer.append("(");
buffer.append(occurrenceRecord2String(record));
insertedStrings.add(toInsertEssentials);
buffer.append(toInsert);
buffer.append(")");
if (counter < toins - 1)
if ((counter>0)&&(counter%500==0)){
insertBuffer(buffer);
AnalysisLogger.getLogger().info("Partial Objects inserted");
buffer = new StringBuffer();
}
else
buffer.append(",");
counter++;
}
}
String updateQ = "SET datestyle = \"ISO, MDY\"; "+DatabaseUtils.insertFromBuffer(finalTableName, columns.toString(), buffer);
// System.out.println("Update:\n"+updateQ);
// AnalysisLogger.getLogger().debug("Update:\n"+updateQ);
DatabaseFactory.executeSQLUpdate(updateQ, dbconnection);
insertBuffer(buffer);
AnalysisLogger.getLogger().info("Objects inserted");
AnalysisLogger.getLogger().info("Inserted " + counter + " objects");
}
objectstoinsert = null;
objectstodelete = null;
insertedStrings=null;
objectstoinsert = new ArrayList<OccurrencePointsMerger.OccurrenceRecord>();
objectstodelete = new ArrayList<OccurrencePointsMerger.OccurrenceRecord>();
System.gc();
}
protected void insertBuffer(StringBuffer buffer) throws Exception{
String subBuffer = buffer.substring(0, buffer.length()-1);
String updateQ = "SET datestyle = \"ISO, MDY\"; "+DatabaseUtils.insertFromString(finalTableName, columns.toString(), subBuffer);
// System.out.println("Update:\n"+updateQ);
AnalysisLogger.getLogger().debug("Update:\n"+updateQ);
DatabaseFactory.executeSQLUpdate(updateQ, dbconnection);
}
protected void prepareFinalTable() throws Exception {
DatabaseFactory.executeSQLUpdate(DatabaseUtils.duplicateTableStatement(leftTableName, finalTableName), dbconnection);
}
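Both the delete and the insert paths now flush the statement buffer every 500 records instead of accumulating one unbounded query. A self-contained sketch of that batching pattern (the flush target stands in for DatabaseFactory.executeSQLUpdate, and the table name is a placeholder):

import java.util.ArrayList;
import java.util.List;

public class BatchFlush {
    static final int BATCH = 500;

    // Accumulate value tuples and flush every BATCH rows so no single
    // statement grows unboundedly large.
    public static void writeAll(List<String> tuples) {
        StringBuilder buffer = new StringBuilder();
        int counter = 0;
        for (String tuple : tuples) {
            buffer.append(tuple);
            counter++;
            if (counter % BATCH == 0) {
                flush(buffer.toString());
                buffer.setLength(0);
            } else {
                buffer.append(",");
            }
        }
        if (buffer.length() > 0)                         // trailing partial batch
            flush(buffer.substring(0, buffer.length() - 1));
    }

    static void flush(String values) {
        System.out.println("insert into mytable values " + values);
    }

    public static void main(String[] args) {
        List<String> tuples = new ArrayList<String>();
        for (int i = 0; i < 1200; i++) tuples.add("(" + i + ")");
        writeAll(tuples);
    }
}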
@@ -583,6 +654,7 @@ public class OccurrencePointsMerger implements Transducerer {
// close DB connection
} catch (Exception e) {
System.err.println("Error in computation");
e.printStackTrace();
AnalysisLogger.getLogger().info(e);
throw e;
} finally {

View File

@@ -8,7 +8,7 @@ public class OccurrencePointsSubtraction extends OccurrencePointsMerger{
@Override
public String getDescription() {
return "Between two Ocurrence Sets, keeps the elements of the Left Set that are not in the Right Set";
return "Between two Ocurrence Sets, keeps the elements of the Left Set that are not similar to any element in the Right Set";
}
protected void manageHighProbability(float probability, OccurrenceRecord leftOcc, OccurrenceRecord rightOcc) {

View File

@@ -37,7 +37,7 @@ public class HcafFilter extends QueryExecutor {
bby1$ = config.getParam(bby1);
bby2$ = config.getParam(bby2);
query = "select * into " + finalTableName + " from hcaf_d where (centerlat-0.25)>" + bby1$ + " and (centerlong-0.25)>" + bbx1$ + " and (centerlat+0.25)<" + bby2$ + " and (centerlong+0.25)<" + bbx2$;
query = "select * into " + finalTableName + " from hcaf_d where (centerlat-0.25)>" + bby1$ + " and (centerlong-0.25)>" + bbx1$ + " and (centerlat+0.25)<" + bby2$ + " and (centerlong+0.25)<" + bbx2$+"; ALTER TABLE "+finalTableName+" ADD PRIMARY KEY (\"csquarecode\")";
}
@Override

View File

@@ -39,7 +39,7 @@ public class MarineAbsencePointsFromAquamapsDistribution extends QueryExecutor {
if (Boolean.parseBoolean(dorandom))
dorandom$="order by random()";
query = "select * into "+finalTableName+" from hcaf_d as a where a.csquarecode in (select csquarecode from "+aquamapsTable+" where probability<=0.2 and speciesid='"+species+"'"+dorandom$+" limit "+nPoints+") and oceanarea>0";
query = "select * into "+finalTableName+" from hcaf_d as a where a.csquarecode in (select csquarecode from "+aquamapsTable+" where probability<=0.2 and speciesid='"+species+"'"+dorandom$+" limit "+nPoints+") and oceanarea>0; ALTER TABLE "+finalTableName+" ADD PRIMARY KEY (\"csquarecode\");";
}
@Override

View File

@@ -33,7 +33,7 @@ public class MarinePresencePoints extends QueryExecutor {
points = "limit "+nPoints;
}
query = "select * into "+finalTableName+" from hcaf_d where csquarecode in (select csquarecode from occurrencecells where speciesid = '"+species+"' limit 100000) and oceanarea>0 "+points;
query = "select * into "+finalTableName+" from hcaf_d where csquarecode in (select csquarecode from occurrencecells where speciesid = '"+species+"' limit 100000) and oceanarea>0 "+points+"; ALTER TABLE "+finalTableName+" ADD PRIMARY KEY (\"csquarecode\")";
}
@Override
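The three query-executor changes above (HcafFilter, MarineAbsencePointsFromAquamapsDistribution, MarinePresencePoints) all append an ALTER TABLE ... ADD PRIMARY KEY statement to the select-into, so the copied table keeps csquarecode indexed for later joins. A hedged sketch of issuing such a two-statement batch over JDBC (connection details omitted; the stock PostgreSQL driver executes ';'-separated statements in one call):

import java.sql.Connection;
import java.sql.Statement;

public class SelectIntoWithKey {
    public static void copyWithKey(Connection conn, String source, String target,
                                   String where) throws Exception {
        String query = "select * into " + target + " from " + source + " where " + where
                + "; ALTER TABLE " + target + " ADD PRIMARY KEY (\"csquarecode\")";
        try (Statement st = conn.createStatement()) {
            st.execute(query); // both statements run in one round trip
        }
    }
}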

View File

@@ -166,6 +166,11 @@ public class DatabaseUtils {
return "insert into "+table+" ("+columnsNames+") values "+values;
}
public static String insertFromString(String table, String columnsNames, String values) {
return "insert into "+table+" ("+columnsNames+") values "+values;
}
public static String deleteFromBuffer(String table, StringBuffer couples) {
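insertFromString mirrors insertFromBuffer but takes an already-trimmed String; insertBuffer above strips the trailing comma before calling it. A quick illustration of the string it produces (table and columns are examples):

public class TrimThenInsert {
    public static void main(String[] args) {
        String values = "('a',1),('b',2),";     // batch buffer ends with a comma
        String trimmed = values.substring(0, values.length() - 1);
        // same string insertFromString builds for table "mytable"
        String sql = "insert into mytable (name,score) values " + trimmed;
        System.out.println(sql); // insert into mytable (name,score) values ('a',1),('b',2)
    }
}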

View File

@@ -0,0 +1 @@
Used as a library in the gCube Framework

View File

@@ -0,0 +1 @@
Used as a library in the gCube Framework

View File

@@ -0,0 +1,8 @@
gCube System - License
------------------------------------------------------------
The gCube/gCore software is licensed as Free Open Source software conveying to
the EUPL (http://ec.europa.eu/idabc/eupl).
The software and documentation is provided by its authors/distributors "as is"
and no expressed or implied warranty is given for its use, quality or fitness
for a particular case.

View File

@@ -0,0 +1,8 @@
gCube System - License
------------------------------------------------------------
The gCube/gCore software is licensed as Free Open Source software conveying to
the EUPL (http://ec.europa.eu/idabc/eupl).
The software and documentation is provided by its authors/distributors "as is"
and no expressed or implied warranty is given for its use, quality or fitness
for a particular case.

View File

@@ -0,0 +1,2 @@
Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"

View File

@@ -0,0 +1,2 @@
Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"

View File

@@ -0,0 +1,42 @@
The gCube System - Ecological Engine Library
------------------------------------------------------------
This work is partially funded by the European Commission in the
context of the D4Science project (www.d4science.eu), under the
1st call of FP7 IST priority.
Authors
-------
* Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"
Version and Release Date
------------------------
version 1.5.0 (26-11-2012)
Description
--------------------
Support library for statistics analysis on Time Series data.
Download information
--------------------
Source code is available from SVN:
http://svn.research-infrastructures.eu/d4science/gcube/trunk/content-management/EcologicalModelling
Binaries can be downloaded from:
http://software.d4science.research-infrastructures.eu/
Documentation
-------------
VREManager documentation is available on-line from the Projects Documentation Wiki:
https://gcube.wiki.gcube-system.org/gcube/index.php/Ecological_Modeling
Licensing
---------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

View File

@@ -0,0 +1,42 @@
The gCube System - Ecological Engine Library
------------------------------------------------------------
This work is partially funded by the European Commission in the
context of the D4Science project (www.d4science.eu), under the
1st call of FP7 IST priority.
Authors
-------
* Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"
Version and Release Date
------------------------
version 1.5.0 (26-11-2012)
Description
--------------------
Support library for statistics analysis on Time Series data.
Download information
--------------------
Source code is available from SVN:
http://svn.research-infrastructures.eu/d4science/gcube/trunk/content-management/EcologicalModelling
Binaries can be downloaded from:
http://software.d4science.research-infrastructures.eu/
Documentation
-------------
VREManager documentation is available on-line from the Projects Documentation Wiki:
https://gcube.wiki.gcube-system.org/gcube/index.php/Ecological_Modeling
Licensing
---------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

View File

@@ -0,0 +1,7 @@
<ReleaseNotes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="xsd/changelog.xsd">
<Changeset component="org.gcube.content-management.ecological-engine.1-0-0"
date="2012-02-23">
<Change>First Release</Change>
</Changeset>
</ReleaseNotes>

View File

@@ -0,0 +1,7 @@
<ReleaseNotes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="xsd/changelog.xsd">
<Changeset component="org.gcube.content-management.ecological-engine.1-0-0"
date="2012-02-23">
<Change>First Release</Change>
</Changeset>
</ReleaseNotes>

View File

@@ -0,0 +1 @@
Used as a library in the gCube Framework

View File

@@ -0,0 +1 @@
Used as a library in the gCube Framework

View File

@@ -0,0 +1,8 @@
gCube System - License
------------------------------------------------------------
The gCube/gCore software is licensed as Free Open Source software conveying to
the EUPL (http://ec.europa.eu/idabc/eupl).
The software and documentation is provided by its authors/distributors "as is"
and no expressed or implied warranty is given for its use, quality or fitness
for a particular case.

View File

@@ -0,0 +1,8 @@
gCube System - License
------------------------------------------------------------
The gCube/gCore software is licensed as Free Open Source software conveying to
the EUPL (http://ec.europa.eu/idabc/eupl).
The software and documentation is provided by its authors/distributors "as is"
and no expressed or implied warranty is given for its use, quality or fitness
for a particular case.

View File

@@ -0,0 +1,2 @@
Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"

View File

@@ -0,0 +1,2 @@
Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"

View File

@@ -0,0 +1,42 @@
The gCube System - Ecological Engine Library
------------------------------------------------------------
This work is partially funded by the European Commission in the
context of the D4Science project (www.d4science.eu), under the
1st call of FP7 IST priority.
Authors
-------
* Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"
Version and Release Date
------------------------
version 1.5.0 (26-11-2012)
Description
--------------------
Support library for statistics analysis on Time Series data.
Download information
--------------------
Source code is available from SVN:
http://svn.research-infrastructures.eu/d4science/gcube/trunk/content-management/EcologicalModelling
Binaries can be downloaded from:
http://software.d4science.research-infrastructures.eu/
Documentation
-------------
VREManager documentation is available on-line from the Projects Documentation Wiki:
https://gcube.wiki.gcube-system.org/gcube/index.php/Ecological_Modeling
Licensing
---------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

View File

@@ -0,0 +1,42 @@
The gCube System - Ecological Engine Library
------------------------------------------------------------
This work is partially funded by the European Commission in the
context of the D4Science project (www.d4science.eu), under the
1st call of FP7 IST priority.
Authors
-------
* Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"
Version and Release Date
------------------------
version 1.5.0 (26-11-2012)
Description
--------------------
Support library for statistics analysis on Time Series data.
Download information
--------------------
Source code is available from SVN:
http://svn.research-infrastructures.eu/d4science/gcube/trunk/content-management/EcologicalModelling
Binaries can be downloaded from:
http://software.d4science.research-infrastructures.eu/
Documentation
-------------
VREManager documentation is available on-line from the Projects Documentation Wiki:
https://gcube.wiki.gcube-system.org/gcube/index.php/Ecological_Modeling
Licensing
---------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

View File

@@ -0,0 +1,7 @@
<ReleaseNotes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="xsd/changelog.xsd">
<Changeset component="org.gcube.content-management.ecological-engine.1-0-0"
date="2012-02-23">
<Change>First Release</Change>
</Changeset>
</ReleaseNotes>

View File

@@ -0,0 +1,7 @@
<ReleaseNotes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="xsd/changelog.xsd">
<Changeset component="org.gcube.content-management.ecological-engine.1-0-0"
date="2012-02-23">
<Change>First Release</Change>
</Changeset>
</ReleaseNotes>

View File

@@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ID></ID>
<Type>Library</Type>
<Profile>
<Description>Ecological Engine Library</Description>
<Class>EcologicalEngine</Class>
<Name>ecological-engine</Name>
<Version>1.0.0</Version>
<Packages>
<Software>
<Name>ecological-engine</Name>
<Version>1.6.0-SNAPSHOT</Version>
<MavenCoordinates>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>ecological-engine</artifactId>
<version>1.6.0-SNAPSHOT</version>
</MavenCoordinates>
<Files>
<File>ecological-engine-1.6.0-SNAPSHOT.jar</File>
</Files>
</Software>
</Packages>
</Profile>
</Resource>

View File

@@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ID></ID>
<Type>Library</Type>
<Profile>
<Description>Ecological Engine Library</Description>
<Class>EcologicalEngine</Class>
<Name>ecological-engine</Name>
<Version>1.0.0</Version>
<Packages>
<Software>
<Name>ecological-engine</Name>
<Version>1.6.0-SNAPSHOT</Version>
<MavenCoordinates>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>ecological-engine</artifactId>
<version>1.6.0-SNAPSHOT</version>
</MavenCoordinates>
<Files>
<File>ecological-engine-1.6.0-SNAPSHOT.jar</File>
</Files>
</Software>
</Packages>
</Profile>
</Resource>

View File

@@ -0,0 +1 @@
https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine

View File

@@ -0,0 +1 @@
https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine