Gianpaolo Coro 2016-02-29 12:32:54 +00:00
parent f27b50e127
commit 874c3e203a
11 changed files with 25 additions and 354 deletions

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>ecological-engine-external-algorithms</name>
<name>ecological-engine-external-algorithms1.1.5</name>
<comment></comment>
<projects>
</projects>

@@ -1,2 +1,6 @@
v. 1.0.0 (19-03-2013)
* First release
v. 1.1.5 (05-09-2014)
* Geothermal analysis
v. 1.1.6 (30-06-2015)
* Introducing average annual precipitation
v. 1.1.7 (23-09-2015)
* Adding algorithms by univ Brazil

@@ -17,7 +17,7 @@ Authors
Version and Release Date
------------------------
version 1.0.0 (19-04-2013)
version 1.1.7 (23-09-2015)
Description
--------------------
@@ -29,7 +29,7 @@ Download information
--------------------
Source code is available from SVN:
http://svn.research-infrastructures.eu/d4science/gcube/trunk/content-management/EcologicalModelling
https://svn.d4science.research-infrastructures.eu/gcube/branches/data-analysis/EcologicalEngineExternalAlgorithms/1.1/
Binaries can be downloaded from:
http://software.d4science.research-infrastructures.eu/

@@ -10,4 +10,13 @@
<Changeset component="${build.finalName}" date="2013-10-22">
<Change>Fix problem in species/taxa procedures </Change>
</Changeset>
<Changeset component="${build.finalName}" date="2014-09-05">
<Change>Geothermal analysis </Change>
</Changeset>
<Changeset component="${build.finalName}" date="2015-06-30">
<Change>Introducing average annual precipitation</Change>
</Changeset>
<Changeset component="${build.finalName}" date="2015-09-23">
<Change>Adding algorithms by univ Brazil</Change>
</Changeset>
</ReleaseNotes>

@@ -1 +1 @@
https://svn.d4science.research-infrastructures.eu/gcube/branches/data-analysis/EcologicalEngineExternalAlgorithms/1.0
https://svn.d4science.research-infrastructures.eu/gcube/branches/data-analysis/EcologicalEngineExternalAlgorithms/1.1/

pom.xml
@@ -9,7 +9,7 @@
</parent>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>ecological-engine-external-algorithms</artifactId>
<version>1.1.7-SNAPSHOT</version>
<version>1.1.8-SNAPSHOT</version>
<name>ecological-engine-external-algorithms</name>
<description>ecological-engine-external-algorithms library</description>
<properties>
@@ -26,54 +26,15 @@
<artifactId>taxamatchpostgres</artifactId>
<version>1.2.0</version>
</dependency>
<dependency>
<groupId>postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>8.4-702.jdbc4</version>
</dependency>
<dependency>
<groupId>rapidminer-custom</groupId>
<artifactId>gsay-service</artifactId>
<version>1.0.0</version>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-gcore-stubs</artifactId>
<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-encryption</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-scope-maps</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.6.4</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.16</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.6.4</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.data.spd</groupId>
<artifactId>spd-client-library</artifactId>
<version>[1.0.0-SNAPSHOT, 4.0.0-SNAPSHOT)</version>
<version>[1.0.0, 4.0.0)</version>
<exclusions>
<exclusion>
<groupId>org.gcube.data.spd</groupId>
@@ -81,8 +42,6 @@
</exclusion>
</exclusions>
</dependency>
<!-- This dependency is needed and is need on TOP to use GWT UI BInder
without old Xerces version of gCore complaining -->
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
@@ -92,54 +51,20 @@
<dependency>
<groupId>org.gcube.data.spd</groupId>
<artifactId>spql-parser</artifactId>
<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
<version>[2.0.0, 3.0.0)</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>csv4j</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
<version>[1.0.0, 2.0.0)</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.data.spd</groupId>
<artifactId>spd-model</artifactId>
<version>[1.0.0-SNAPSHOT, 4.0.0-SNAPSHOT)</version>
<version>[1.0.0, 4.0.0)</version>
</dependency>
<dependency>
<groupId>org.gcube.resources.discovery</groupId>
<artifactId>discovery-client</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-clients</artifactId>
<version>[2.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-gcore-clients</artifactId>
<version>[1.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.resources.discovery</groupId>
<artifactId>ic-client</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-scope-maps</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
</dependency>
</dependencies>
<repositories>
<repository>

@@ -3,7 +3,6 @@ package org.gcube.dataanalysis.fin.taxamatch;
import java.util.ArrayList;
import java.util.List;
import org.apache.bcel.generic.ALOAD;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;

@@ -1,192 +0,0 @@
package org.gcube.dataanalysis.precipitations;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.hibernate.SessionFactory;
public class AnnualPrecipitation extends StandardLocalExternalAlgorithm {
// Class Attributes
String outputtablename;
String outputtable;
@Override
public String getDescription() {
return "This is a simple algorithm that returns the annual precipitation. The input is a general tabular resource with two columns (date and precipitation).";
}
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug("Initialization");
}
@Override
protected void process() throws Exception {
// Recovering data
config.setParam("DatabaseDriver", "org.postgresql.Driver");
SessionFactory dbconnection = DatabaseUtils.initDBSession(config);
try {
String tablename = getInputParameter("PrecTable");
String columnnames = getInputParameter("PrecColumns");
outputtablename = getInputParameter("OutputTableName");
outputtable = getInputParameter("OutputTable");
String[] columnlist = columnnames.split(AlgorithmConfiguration.getListSeparator());
List<Object> dataList = DatabaseFactory.executeSQLQuery("select " + columnlist[0] + " from " + tablename + " order by " + columnlist[0] + " asc", dbconnection);
List<Object> precList = DatabaseFactory.executeSQLQuery("select " + columnlist[1] + " from " + tablename + " order by " + columnlist[0] + " asc", dbconnection);
// Business Logic
AnalysisLogger.getLogger().info("Creating output table [" + "create table " + outputtable + " (year integer, value real)]");
DatabaseFactory.executeSQLUpdate("create table " + outputtable + " (year integer, value real)", dbconnection);
Double averageValue = new Double(0);
for (int i = 0; i < dataList.size(); i++) { // for each line
if (i == 0) { // first iteration
averageValue = averageValue + Double.parseDouble(String.valueOf(precList.get(i)));
if (dataList.size() == 1) { // if first iteration is also
// the last
Date currentDate = anyStringToDate(String.valueOf(dataList.get(i)));
AnalysisLogger.getLogger().info("i=0 Current Date: "+currentDate);
Calendar currentDateCal = Calendar.getInstance();
currentDateCal.setTime(currentDate);
int yearCurrentDate = currentDateCal.get(Calendar.YEAR);
AnalysisLogger.getLogger().info("Inserting into table " + "insert into " + outputtable + " (year,value) values (" + yearCurrentDate + "," + averageValue + ")");
DatabaseFactory.executeSQLUpdate("insert into " + outputtable + " (year,value) values (" + yearCurrentDate + "," + averageValue + ")", dbconnection);
averageValue = new Double(0);
}
}
if (i > 0) { // other iterations
Date currentDate = anyStringToDate(String.valueOf(dataList.get(i)));
Date lastDate = anyStringToDate(String.valueOf(dataList.get(i - 1)));
AnalysisLogger.getLogger().info("Current Date: "+currentDate);
AnalysisLogger.getLogger().info("Last Date: "+lastDate);
Calendar currentDateCal = Calendar.getInstance();
currentDateCal.setTime(currentDate);
int yearCurrentDate = currentDateCal.get(Calendar.YEAR);
Calendar lastDateCal = Calendar.getInstance();
lastDateCal.setTime(lastDate);
int yearLastDate = lastDateCal.get(Calendar.YEAR);
if (yearCurrentDate > yearLastDate) {
// inserting average annual
AnalysisLogger.getLogger().info("Inserting into table " + "insert into " + outputtable + " (year,value) values (" + yearLastDate + "," + averageValue + ")");
DatabaseFactory.executeSQLUpdate("insert into " + outputtable + " (year,value) values (" + yearLastDate + "," + averageValue + ")", dbconnection);
averageValue = new Double(0);
averageValue = new Double(String.valueOf(precList.get(i)));
} else {
averageValue = averageValue + Double.parseDouble(String.valueOf(precList.get(i)));
}
}
if (i != 0 && i == dataList.size() - 1) { // last iteration
Date currentDate = anyStringToDate(String.valueOf(dataList.get(i)));
Calendar currentDateCal = Calendar.getInstance();
currentDateCal.setTime(currentDate);
int yearCurrentDate = currentDateCal.get(Calendar.YEAR);
AnalysisLogger.getLogger().info("Inserting into table " + "insert into " + outputtable + " (year,value) values (" + yearCurrentDate + "," + averageValue + ")");
DatabaseFactory.executeSQLUpdate("insert into " + outputtable + " (year,value) values (" + yearCurrentDate + "," + averageValue + ")", dbconnection);
averageValue = new Double(0);
}
}
} catch (Exception e) {
AnalysisLogger.getLogger().error(e.getMessage());
throw e;
} finally {
DatabaseUtils.closeDBConnection(dbconnection);
}
}
@Override
protected void setInputParameters() {
// First parameter: Internal tabular resource
List<TableTemplates> templates = new ArrayList<TableTemplates>();
templates.add(TableTemplates.GENERIC);
InputTable tinput = new InputTable(templates, "PrecTable", "Precipitation tabular resource");
ColumnTypesList columns = new ColumnTypesList("PrecTable", "PrecColumns", "Selected columns for date and precipitation", false);
inputs.add(tinput);
inputs.add(columns);
// Second parameter: Output table
ServiceType randomstring = new ServiceType(ServiceParameters.RANDOMSTRING, "OutputTable", "", "prec");
inputs.add(randomstring);
DatabaseType.addDefaultDBPars(inputs);
// Third parameter: Output table name
addStringInput("OutputTableName", "The name of the output table", "prec_");
}
@Override
public StatisticalType getOutput() {
List<TableTemplates> outtemplate = new ArrayList<TableTemplates>();
outtemplate.add(TableTemplates.GENERIC);
OutputTable out = new OutputTable(outtemplate, outputtablename, outputtable, "The output table containing all the matches");
return out;
}
@Override
public void shutdown() {
AnalysisLogger.getLogger().debug("Shutdown");
}
/**
* An auxiliary method that converts string to date
*
* @param value
* @return
*/
public static Date anyStringToDate(String value) {
Date date = null;
try {
if (isValidFormat("yyyy-mm-dd", value)) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-mm-dd");
date = sdf.parse(value);
}
if (isValidFormat("yyyymmdd", value)) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyymmdd");
date = sdf.parse(value);
}
if (isValidFormat("dd/mm/yyyy", value)) {
SimpleDateFormat sdf = new SimpleDateFormat("dd/mm/yyyy");
date = sdf.parse(value);
}
if (isValidFormat("dd-mm-yyyy", value)) {
SimpleDateFormat sdf = new SimpleDateFormat("dd-mm-yyyy");
date = sdf.parse(value);
}
} catch (Exception e) {
date = null;
}
return date;
}
/**
* An auxiliary method that validates if a string respect a specified date format
*
* @param format
* @param value
* @return
*/
public static boolean isValidFormat(String format, String value) {
Date date = null;
try {
SimpleDateFormat sdf = new SimpleDateFormat(format);
date = sdf.parse(value);
if (!value.equals(sdf.format(date))) {
date = null;
}
} catch (ParseException ex) {
date = null;
}
return date != null;
}
}
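For readers outside the gCube framework: the removed AnnualPrecipitation class, stripped of its database plumbing, groups the (date, precipitation) rows by calendar year and accumulates one value per year, emitting a (year, value) pair for each. The following is a minimal standalone sketch of that aggregation, not the class itself: names such as AnnualPrecipitationSketch and totalsByYear are illustrative, java.time replaces SimpleDateFormat, in-memory lists replace the DatabaseFactory queries, and the SQL table creation and insert steps are omitted.

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

// Illustrative sketch only: per-year precipitation totals, outside the gCube framework.
public class AnnualPrecipitationSketch {

    // Date patterns analogous to those accepted by the removed anyStringToDate helper.
    private static final DateTimeFormatter[] PATTERNS = {
            DateTimeFormatter.ofPattern("yyyy-MM-dd"),
            DateTimeFormatter.ofPattern("yyyyMMdd"),
            DateTimeFormatter.ofPattern("dd/MM/yyyy"),
            DateTimeFormatter.ofPattern("dd-MM-yyyy")
    };

    // Try each pattern in turn; return empty if none matches.
    static Optional<LocalDate> parseDate(String value) {
        for (DateTimeFormatter f : PATTERNS) {
            try {
                return Optional.of(LocalDate.parse(value, f));
            } catch (Exception ignored) {
                // fall through to the next pattern
            }
        }
        return Optional.empty();
    }

    // Sum precipitation values per calendar year, skipping unparsable dates.
    static Map<Integer, Double> totalsByYear(List<String> dates, List<Double> precipitation) {
        Map<Integer, Double> totals = new LinkedHashMap<>();
        for (int i = 0; i < dates.size(); i++) {
            int idx = i;
            parseDate(dates.get(i)).ifPresent(d ->
                    totals.merge(d.getYear(), precipitation.get(idx), Double::sum));
        }
        return totals;
    }

    public static void main(String[] args) {
        List<String> dates = List.of("2014-12-30", "2014-12-31", "2015-01-01");
        List<Double> prec = List.of(1.5, 2.0, 0.5);
        // Expected output: {2014=3.5, 2015=0.5}
        System.out.println(totalsByYear(dates, prec));
    }
}

Using LocalDate with explicit patterns avoids the lenient-parsing pitfalls of SimpleDateFormat, where lowercase mm denotes minutes rather than months.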

@@ -1,49 +0,0 @@
package org.gcube.dataanalysis.test;
import java.util.List;
import java.util.UUID;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class TestAverageAnnualPrecipitation {
/**
* @param args
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST of PREC ALGO 1");
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(testConfig());
trans.get(0).init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
trans = null;
}
private static AlgorithmConfiguration testConfig() {
AlgorithmConfiguration config = Regressor.getConfig();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setAgent("AVERAGE_ANNUAL_PREC");
config.setParam("DatabaseUserName","postgres");
config.setParam("DatabasePassword","postgres");
config.setParam("DatabaseURL","jdbc:postgresql://localhost");
config.setParam("DatabaseDriver","org.postgresql.Driver");
config.setParam("PrecTable", "test");
config.setParam("PrecColumns", "day" + AlgorithmConfiguration.listSeparator + "precipitation");
String tablename = "testprec" + (UUID.randomUUID());
config.setParam("OutputTableName", "Test Prec");
config.setParam("OutputTable", tablename.replace("-", ""));
return config;
}
}

@@ -1,4 +1,4 @@
package org.gube.examples;
package org.gcube.examples;
import java.io.File;
import java.io.FileWriter;

@@ -1,25 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ID></ID>
<Type>Library</Type>
<Profile>
<Description>Ecological Engine Library</Description>
<Class>EcologicalEngineExternalAlgorithms</Class>
<Name>ecological-engine-external-algorithms</Name>
<Version>1.1.1</Version>
<Packages>
<Software>
<Name>ecological-engine-external-algorithms</Name>
<Version>1.1.7-SNAPSHOT</Version>
<MavenCoordinates>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>ecological-engine-external-algorithms</artifactId>
<version>1.1.7-SNAPSHOT</version>
</MavenCoordinates>
<Files>
<File>ecological-engine-external-algorithms-1.1.7-SNAPSHOT.jar</File>
</Files>
</Software>
</Packages>
</Profile>
</Resource>