Add TrendyLyzer and species/taxa algorithms; bump version 1.0.0 -> 1.1.0

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineExternalAlgorithms@79495 82a268e6-3cf1-43bd-a215-b396298e98cf
Angela Italiano 2013-07-22 10:02:53 +00:00
parent 91a15af3f8
commit 14e04746c9
22 changed files with 3036 additions and 18 deletions

View File

@@ -1,7 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="output" path="target/classes"/>
</classpath>

View File

@@ -4,4 +4,7 @@
date="2013-03-19">
<Change>First Release</Change>
</Changeset>
<Changeset component="${build.finalName}" date="2013-07-22">
<Change>Added TrendyLyzer algorithms and species/taxa procedures</Change>
</Changeset>
</ReleaseNotes>

View File

@@ -6,7 +6,7 @@
<Description>Ecological Engine Library</Description>
<Class>EcologicalEngineExternalAlgorithms</Class>
<Name>${artifactId}</Name>
<Version>1.0.0</Version>
<Version>1.1.0</Version>
<Packages>
<Software>
<Name>${artifactId}</Name>

pom.xml
View File

@@ -9,7 +9,7 @@
</parent>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>ecological-engine-external-algorithms</artifactId>
<version>1.0.0-SNAPSHOT</version>
<version>1.1.0</version>
<name>ecological-engine-external-algorithms</name>
<description>ecological-engine-external-algorithms library</description>
<properties>
@@ -20,12 +20,6 @@
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>ecological-engine</artifactId>
<version>1.7.0-SNAPSHOT</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>postgresql</groupId>
@@ -33,10 +27,98 @@
<version>8.4-702.jdbc4</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.0.9</version>
<groupId>org.gcube.common</groupId>
<artifactId>rapidminer-custom</artifactId>
<version>1.2.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-gcore-stubs</artifactId>
<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-encryption</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-scope-maps</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.data.spd</groupId>
<artifactId>spd-client-library</artifactId>
<version>[3.0.0-SNAPSHOT, 4.0.0-SNAPSHOT)</version>
<scope>provided</scope>
</dependency>
<!-- This dependency is needed, and must be kept on TOP, so that GWT UiBinder
	works without the old Xerces version of gCore complaining -->
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
<version>2.9.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.data.spd</groupId>
<artifactId>spql-parser</artifactId>
<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>csv4j</artifactId>
<version>[1.1.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.data.spd</groupId>
<artifactId>spd-model</artifactId>
<version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
<!-- <version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version> -->
<!-- UNCOMMENT THIS FOR RELEASE -->
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.resources.discovery</groupId>
<artifactId>discovery-client</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-clients</artifactId>
<version>[2.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-fw-clients</artifactId>
<version>1.0.0-SNAPSHOT</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.gcube.resources.discovery</groupId>
<artifactId>ic-client</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-scope-maps</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
</dependency>
</dependencies>
<repositories>
<repository>

View File

@@ -0,0 +1,13 @@
package org.gcube.dataanalysis.JobSMspd;
/**
* Hello world!
*
*/
public class App
{
public static void main( String[] args )
{
System.out.println( "Hello World!" );
}
}

View File

@@ -0,0 +1,7 @@
package org.gcube.dataanalysis.JobSMspd;
public enum ExpandOption {
EXPAND,
NOT_EXPAND
}

View File

@@ -0,0 +1,485 @@
package org.gcube.dataanalysis.JobSMspd;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.manager;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.data.spd.client.proxies.Manager;
import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.PluginDescription;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.DynamicEnum;
import org.hibernate.SessionFactory;
public class OccurencesProcedure extends StandardLocalExternalAlgorithm {
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
static String databaseParameterName = "FishBase";
static String userParameterName = "user";
static String passwordParameterName = "password";
static String urlParameterName = "FishBase";
SessionFactory dbconnection = null;
public static boolean call=false;
String tablename;
String columnnames;
List<Object> speciesList;
protected String fileName;
BufferedWriter out;
String outputtablename;
String outputErrortablename;
String outputtable;
HashMap<String, String>dpHash= new HashMap<String, String>();
HashMap<String, String>dpUHash= new HashMap<String, String>();
HashMap<String, String>dpEHash= new HashMap<String, String>();
String tableError;
private DataPenum dp = new DataPenum();
private ExtentionDPEnum dpE = new ExtentionDPEnum();
private UnfoldDPEnum dpU = new UnfoldDPEnum();
private String dataProvider = "Data Provider :";
private String chosendataProvider = new String();
private String dataProviderExtention = "Data Provider (Expand Option):";
private String chosendataProviderExtention=new String();
//private String chosendataProviderUnfold="Data Provider Unfold:";
private String dataProviderUnfold = "Data Provider (Unfold Option):";
private String chosendataProviderUnfold=new String();
@Override
public String getDescription() {
return "An algorithm that once data provider and search options are selecteced, it has to return the occurrences";
}
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug("OccurencesProcedure init");
}
public void fulfilParameters() throws IOException {
config.setParam("DatabaseDriver", "org.postgresql.Driver");
dbconnection = DatabaseUtils.initDBSession(config);
tablename = getInputParameter("SpeciesTable");
columnnames = getInputParameter("SpeciesColumns");
outputtablename = getInputParameter("OutputTableName");
outputtable = getInputParameter("OutputTable");
tableError = getInputParameter("ErrorTable");
chosendataProviderUnfold = getInputParameter(dataProviderUnfold);
chosendataProviderExtention = getInputParameter(dataProviderExtention);
chosendataProvider = getInputParameter(dataProvider);
outputErrortablename = getInputParameter("ErrorTableName");
String[] columnlist = columnnames.split(AlgorithmConfiguration
.getListSeparator());
speciesList = DatabaseFactory.executeSQLQuery("select " + columnlist[0]
+ " from " + tablename, dbconnection);
fileName = super.config.getPersistencePath() + "results.csv";
out = new BufferedWriter(new FileWriter(fileName));
}
@Override
protected void process() throws Exception {
try {
fulfilParameters();
createTables();
			// Split the species list into three chunks of nearly equal size,
			// one per worker thread.
			int chunkSize = speciesList.size() / 3;
			ArrayList<String> chunk1 = new ArrayList<String>();
			ArrayList<String> chunk2 = new ArrayList<String>();
			ArrayList<String> chunk3 = new ArrayList<String>();
			for (int i = 0; i < speciesList.size(); i++) {
				if (i < chunkSize)
					chunk1.add((String) speciesList.get(i));
				else if (i <= 2 * chunkSize)
					chunk2.add((String) speciesList.get(i));
				else
					chunk3.add((String) speciesList.get(i));
			}
ThreadExtractionOccFromSPD t1 = new ThreadExtractionOccFromSPD(
chunk1, chosendataProvider,chosendataProviderExtention,chosendataProviderUnfold);
ThreadExtractionOccFromSPD t2 = new ThreadExtractionOccFromSPD(
chunk2, chosendataProvider,chosendataProviderExtention,chosendataProviderUnfold);
ThreadExtractionOccFromSPD t3 = new ThreadExtractionOccFromSPD(
chunk3, chosendataProvider,chosendataProviderExtention,chosendataProviderUnfold);
Thread th1 = new Thread(t1);
th1.start();
Thread th2 = new Thread(t2);
th2.start();
Thread th3 = new Thread(t3);
th3.start();
th1.join();
th2.join();
th3.join();
write("scientific_name; author; catalogue_number; citation; collection_gode; cordinate_uncertaninty_in_meters; country;"
+ "credits; family; id; institution_code; kingdom; locality; providere; latitude; longitude; max_depth; min_depth");
out.newLine();
insertInTheTable(t1.getInfo());
insertInTheTable(t2.getInfo());
insertInTheTable(t3.getInfo());
insertInTheTableErrors(t1.getErrors());
insertInTheTableErrors(t2.getErrors());
insertInTheTableErrors(t3.getErrors());
} catch (Exception e) {
e.printStackTrace();
throw e;
} finally {
DatabaseUtils.closeDBConnection(dbconnection);
out.close();
}
}
	private void createTables() throws Exception {
		// Only the error table is created; occurrence results are written to
		// the CSV file rather than to a database table.
		DatabaseFactory.executeSQLUpdate("create table " + tableError
				+ " (error character varying)", dbconnection);
	}
	private void insertInTheTable(ArrayList<ArrayList<String>> arrays)
			throws Exception {
		// Despite its name, this method writes semicolon-separated rows to the
		// CSV result file; the per-row database insert is disabled.
		for (ArrayList<String> array : arrays) {
			StringBuilder row = new StringBuilder();
			int i = 0;
			for (String s : array) {
				if (i != 0)
					row.append("; ");
				row.append(" '");
				if (s != null)
					s = s.replace("'", "");
				row.append(s).append("'");
				i++;
			}
			write(row.toString());
			out.newLine();
		}
	}
	private void insertInTheTableErrors(ArrayList<String> arrays)
			throws Exception {
		if (arrays != null) {
			for (String er : arrays) {
				// Strip single quotes so the concatenated INSERT cannot break.
				String query = "insert into " + tableError
						+ " (error) values ('" + er.replace("'", "") + "')";
				AnalysisLogger.getLogger().debug("query error : " + query);
				DatabaseFactory.executeSQLUpdate(query, dbconnection);
			}
		}
	}
public void print(ArrayList<ArrayList<String>> arrays) {
for (ArrayList<String> array : arrays) {
for (String s : array) {
AnalysisLogger.getLogger().debug(s);
}
}
}
@Override
public void shutdown() {
}
	public void setDynamicParameter() {
		AnalysisLogger.getLogger().debug("Inside setDynamicParameter");
		// NOTE: the scope is hard-coded to the development VRE.
		ScopeProvider.instance.set("/gcube/devsec");
		Manager manager = manager().build();
		AnalysisLogger.getLogger().debug("manager built");
		AnalysisLogger.getLogger().debug("retrieving the plugin descriptions");
		List<PluginDescription> plugin = manager.getPluginsDescription();
		AnalysisLogger.getLogger().debug("plugin descriptions retrieved");
		dp.addEnum(DataProvidersType.class, "ALL");
		dpE.addEnum(ExtentionDPType.class, "ALL");
		dpU.addEnum(UnfoldDPEnumType.class, "NO OPTION");
		dpE.addEnum(ExtentionDPType.class, "NO OPTION");
		if (plugin != null) {
			AnalysisLogger.getLogger().debug(
					"*****PluginDescription is NOT null - length: "
							+ plugin.size());
			for (int i = 0; i < plugin.size(); i++) {
				PluginDescription pluginDescription = plugin.get(i);
				AnalysisLogger.getLogger().debug(
						"For plugin ***" + pluginDescription.getName());
				Map<Capabilities, List<Conditions>> pluginCapabilities = pluginDescription
						.getSupportedCapabilities();
				AnalysisLogger.getLogger().debug(
						"capability map size: " + pluginCapabilities.size());
				for (Entry<Capabilities, List<Conditions>> pluginCapability : pluginCapabilities
						.entrySet()) {
					Capabilities capability = pluginCapability.getKey();
					AnalysisLogger.getLogger().debug(capability.name());
					// Register the provider under the option list matching
					// each declared capability.
					if (capability.name().equals("Unfold"))
						dpU.addEnum(UnfoldDPEnumType.class, pluginDescription.getName());
					if (capability.name().equals("Expansion"))
						dpE.addEnum(ExtentionDPType.class, pluginDescription.getName());
					if (capability.name().equals("Occurrence"))
						dp.addEnum(DataProvidersType.class, pluginDescription.getName());
				}
			}
		} else
			AnalysisLogger.getLogger().debug("*****PluginDescription is null");
	}
@Override
protected void setInputParameters() {
try{
AnalysisLogger.getLogger().debug("inside setInputParameters ");
addRemoteDatabaseInput(databaseParameterName, urlParameterName,
userParameterName, passwordParameterName, "driver", "dialect");
List<TableTemplates> templates = new ArrayList<TableTemplates>();
templates.add(TableTemplates.GENERIC);
InputTable tinput = new InputTable(templates, "SpeciesTable",
"The table containing the species information");
ColumnTypesList columns = new ColumnTypesList("SpeciesTable",
"SpeciesColumns", "Select the columns for species name", false);
addStringInput("OutputTableName", "The name of the output table",
"occ_");
addStringInput("ErrorTableName", "The name of the output table", "err_");
ServiceType randomstring = new ServiceType(
ServiceParameters.RANDOMSTRING, "OutputTable", "", "occ");
ServiceType randomstringErr = new ServiceType(
ServiceParameters.RANDOMSTRING, "ErrorTable", "", "err");
AnalysisLogger.getLogger().debug("before setDynamicParameter() ");
if(!call)
setDynamicParameter();
// try {
//// if (justcall == 0) {
//// justcall = 1;
//
//// }
// } catch (Throwable e) {
// e.printStackTrace();
// AnalysisLogger.getLogger().debug(e.toString());
// }
addEnumerateInput(DataProvidersType.values(), dataProvider,
"Choose Data Providere", "ALL");
AnalysisLogger.getLogger().debug("Dopo DataProvidersType");
addEnumerateInput(ExtentionDPType.values(),dataProviderExtention ,
"Choose Expand Option Data Providere","ALL");
AnalysisLogger.getLogger().debug("Dopo ExtentionDPType");
addEnumerateInput(UnfoldDPEnumType.values(), dataProviderUnfold,
"Choose UnfoldRR Option Data Providere","ALL");
AnalysisLogger.getLogger().debug("Dopo UnfoldDPEnumType");
inputs.add(tinput);
inputs.add(columns);
inputs.add(randomstring);
inputs.add(randomstringErr);
DatabaseType.addDefaultDBPars(inputs);
}catch (Throwable e) {
e.printStackTrace();
AnalysisLogger.getLogger().debug(e.toString());
}
call=true;
}
@Override
public StatisticalType getOutput() {
List<TableTemplates> outtemplate = new ArrayList<TableTemplates>();
outtemplate.add(TableTemplates.GENERIC);
List<TableTemplates> outtemplateErr = new ArrayList<TableTemplates>();
outtemplateErr.add(TableTemplates.GENERIC);
		OutputTable outErr = new OutputTable(outtemplate, outputErrortablename,
				tableError, "The output table containing the processing errors");
		PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
				fileName), PrimitiveTypes.FILE, "OccFile", "OccFile");
		map.put("Output", f);
		map.put("Errors", outErr);
		PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map,
				PrimitiveTypes.MAP, "ResultsMap", "Results Map");
		return output;
	}
public void write(String writeSt) {
try {
out.write(writeSt);
		} catch (IOException e) {
			e.printStackTrace();
		}
}
enum DataProvidersType {
}
class DataPenum extends DynamicEnum {
public Field[] getFields() {
Field[] fields = DataProvidersType.class.getDeclaredFields();
return fields;
}
}
enum ExtentionDPType {
}
class ExtentionDPEnum extends DynamicEnum {
public Field[] getFields() {
Field[] fields = ExtentionDPType.class.getDeclaredFields();
return fields;
}
}
enum UnfoldDPEnumType {
}
class UnfoldDPEnum extends DynamicEnum {
public Field[] getFields() {
Field[] fields = UnfoldDPEnumType.class.getDeclaredFields();
return fields;
}
}
}
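The three empty enums above (DataProvidersType, ExtentionDPType, UnfoldDPEnumType) are placeholders that the ecological-engine DynamicEnum helper appears to populate at run time with the provider names discovered through the SPD manager. As a rough sketch of the same idea, here is a minimal list-backed option registry; DynamicOptions and its methods are hypothetical names for illustration, not the ecological-engine API.

import java.util.LinkedHashSet;
import java.util.Set;

// Hypothetical sketch: a run-time option registry playing the role of the
// empty DataProvidersType/ExtentionDPType/UnfoldDPEnumType enums above.
public class DynamicOptions {
	// Insertion-ordered and duplicate-free, as repeated addEnum calls should be.
	private final Set<String> values = new LinkedHashSet<String>();

	public void add(String value) {
		values.add(value);
	}

	public String[] values() {
		return values.toArray(new String[0]);
	}

	public static void main(String[] args) {
		DynamicOptions dataProviders = new DynamicOptions();
		dataProviders.add("ALL");  // default entry
		dataProviders.add("OBIS"); // as discovered from plugin capabilities
		dataProviders.add("OBIS"); // duplicates are ignored
		for (String v : dataProviders.values())
			System.out.println(v); // prints ALL, then OBIS
	}
}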

View File

@@ -0,0 +1,429 @@
package org.gcube.dataanalysis.JobSMspd;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.manager;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.data.spd.client.proxies.Manager;
import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.PluginDescription;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.OutputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.ServiceType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.ServiceParameters;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.DynamicEnum;
import org.hibernate.SessionFactory;
public class TaxaProcedure extends StandardLocalExternalAlgorithm {
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
static String databaseParameterName = "FishBase";
static String userParameterName = "user";
static String passwordParameterName = "password";
static String urlParameterName = "FishBase";
SessionFactory dbconnection = null;
public static boolean call=false;
String tablename;
String columnnames;
List<Object> speciesList;
protected String fileName;
BufferedWriter out;
String outputtablename;
String outputErrortablename;
String outputtable;
HashMap<String, String>dpHash= new HashMap<String, String>();
HashMap<String, String>dpUHash= new HashMap<String, String>();
HashMap<String, String>dpEHash= new HashMap<String, String>();
String tableError;
private DataPenum dp = new DataPenum();
private ExtentionDPEnum dpE = new ExtentionDPEnum();
private UnfoldDPEnum dpU = new UnfoldDPEnum();
private String dataProvider = "Data Provider :";
private String chosendataProvider = new String();
private String dataProviderExtention = "Data Provider (Expand Option):";
private String chosendataProviderExtention=new String();
//private String chosendataProviderUnfold="Data Provider Unfold:";
private String dataProviderUnfold = "Data Provider (Unfold Option):";
private String chosendataProviderUnfold=new String();
@Override
public String getDescription() {
return "An algorithm that once data provider and search options are selecteced, it has to return the taxon";
}
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug("TaxaProcedure init");
}
public void fulfilParameters() throws IOException {
config.setParam("DatabaseDriver", "org.postgresql.Driver");
dbconnection = DatabaseUtils.initDBSession(config);
tablename = getInputParameter("SpeciesTable");
columnnames = getInputParameter("SpeciesColumns");
outputtablename = getInputParameter("OutputTableName");
outputtable = getInputParameter("OutputTable");
tableError = getInputParameter("ErrorTable");
chosendataProviderUnfold = getInputParameter(dataProviderUnfold);
chosendataProviderExtention = getInputParameter(dataProviderExtention);
chosendataProvider = getInputParameter(dataProvider);
outputErrortablename = getInputParameter("ErrorTableName");
String[] columnlist = columnnames.split(AlgorithmConfiguration
.getListSeparator());
speciesList = DatabaseFactory.executeSQLQuery("select " + columnlist[0]
+ " from " + tablename, dbconnection);
fileName = super.config.getPersistencePath() + "results.csv";
out = new BufferedWriter(new FileWriter(fileName));
}
@Override
protected void process() throws Exception {
try {
fulfilParameters();
createTables();
			// Split the species list into three chunks of nearly equal size,
			// one per worker thread.
			int chunkSize = speciesList.size() / 3;
			ArrayList<String> chunk1 = new ArrayList<String>();
			ArrayList<String> chunk2 = new ArrayList<String>();
			ArrayList<String> chunk3 = new ArrayList<String>();
			for (int i = 0; i < speciesList.size(); i++) {
				if (i < chunkSize)
					chunk1.add((String) speciesList.get(i));
				else if (i <= 2 * chunkSize)
					chunk2.add((String) speciesList.get(i));
				else
					chunk3.add((String) speciesList.get(i));
			}
ThreadExtractionTaxaFromSPD t1 = new ThreadExtractionTaxaFromSPD(
chunk1, chosendataProvider,chosendataProviderExtention,chosendataProviderUnfold);
ThreadExtractionTaxaFromSPD t2 = new ThreadExtractionTaxaFromSPD(
chunk2, chosendataProvider,chosendataProviderExtention,chosendataProviderUnfold);
ThreadExtractionTaxaFromSPD t3 = new ThreadExtractionTaxaFromSPD(
chunk3, chosendataProvider,chosendataProviderExtention,chosendataProviderUnfold);
Thread th1 = new Thread(t1);
th1.start();
Thread th2 = new Thread(t2);
th2.start();
Thread th3 = new Thread(t3);
th3.start();
th1.join();
th2.join();
th3.join();
write("scientific_name; author; citation; credits; id; lsid; providere; rank; ");
out.newLine();
insertInTheTable(t1.getInfo());
insertInTheTable(t2.getInfo());
insertInTheTable(t3.getInfo());
insertInTheTableErrors(t1.getErrors());
insertInTheTableErrors(t2.getErrors());
insertInTheTableErrors(t3.getErrors());
} catch (Exception e) {
e.printStackTrace();
throw e;
} finally {
DatabaseUtils.closeDBConnection(dbconnection);
out.close();
}
}
private void createTables() throws Exception {
DatabaseFactory.executeSQLUpdate("create table " + tableError
+ " (error character varying)", dbconnection);
}
	private void insertInTheTable(ArrayList<ArrayList<String>> arrays) throws IOException {
		// Despite its name, this method writes semicolon-separated rows to the
		// CSV result file; the per-row database insert is disabled.
		for (ArrayList<String> array : arrays) {
			StringBuilder row = new StringBuilder();
			int i = 0;
			for (String s : array) {
				if (i != 0)
					row.append("; ");
				row.append(" '");
				if (s != null)
					s = s.replace("'", "");
				row.append(s).append("'");
				i++;
			}
			write(row.toString());
			out.newLine();
		}
	}
	private void insertInTheTableErrors(ArrayList<String> arrays)
			throws Exception {
		if (arrays != null) {
			for (String er : arrays) {
				// Strip single quotes so the concatenated INSERT cannot break.
				String query = "insert into " + tableError
						+ " (error) values ('" + er.replace("'", "") + "')";
				AnalysisLogger.getLogger().debug("query error : " + query);
				DatabaseFactory.executeSQLUpdate(query, dbconnection);
			}
		}
	}
public void print(ArrayList<ArrayList<String>> arrays) {
for (ArrayList<String> array : arrays) {
for (String s : array) {
AnalysisLogger.getLogger().debug(s);
}
}
}
@Override
public void shutdown() {
}
	public void setDynamicParameter() {
		AnalysisLogger.getLogger().debug("Inside setDynamicParameter");
		// NOTE: the scope is hard-coded to the development VRE.
		ScopeProvider.instance.set("/gcube/devsec");
		Manager manager = manager().build();
		AnalysisLogger.getLogger().debug("manager built");
		AnalysisLogger.getLogger().debug("retrieving the plugin descriptions");
		List<PluginDescription> plugin = manager.getPluginsDescription();
		AnalysisLogger.getLogger().debug("plugin descriptions retrieved");
		dp.addEnum(DataProvidersType.class, "ALL");
		dpE.addEnum(ExtentionDPType.class, "ALL");
		dpU.addEnum(UnfoldDPEnumType.class, "NO OPTION");
		dpE.addEnum(ExtentionDPType.class, "NO OPTION");
if (plugin != null) {
AnalysisLogger.getLogger().debug(
"*****PluginDescription is NOT null - length: "
+ plugin.size());
for (int i = 0; i < plugin.size(); i++) {
PluginDescription pluginDescription = plugin.get(i);
AnalysisLogger.getLogger().debug(
"For plugin ***"+pluginDescription.getName() );
Map<Capabilities, List<Conditions>> pluginCapabilities = pluginDescription
.getSupportedCapabilities();
				AnalysisLogger.getLogger().debug(
						"capability map size: " + pluginCapabilities.size());
				for (Entry<Capabilities, List<Conditions>> pluginCapability : pluginCapabilities
						.entrySet()) {
					Capabilities capability = pluginCapability.getKey();
					AnalysisLogger.getLogger().debug(capability.name());
					// Register the provider under the option list matching
					// each declared capability; taxa search keys on the
					// Classification capability rather than Occurrence.
					if (capability.name().equals("Unfold"))
						dpU.addEnum(UnfoldDPEnumType.class, pluginDescription.getName());
					if (capability.name().equals("Expansion"))
						dpE.addEnum(ExtentionDPType.class, pluginDescription.getName());
					if (capability.name().equals("Classification"))
						dp.addEnum(DataProvidersType.class, pluginDescription.getName());
}
}
} else
AnalysisLogger.getLogger().debug("*****PluginDescription is null");
}
@Override
protected void setInputParameters() {
try{
AnalysisLogger.getLogger().debug("inside setInputParameters ");
addRemoteDatabaseInput(databaseParameterName, urlParameterName,
userParameterName, passwordParameterName, "driver", "dialect");
List<TableTemplates> templates = new ArrayList<TableTemplates>();
templates.add(TableTemplates.GENERIC);
InputTable tinput = new InputTable(templates, "SpeciesTable",
"The table containing the species information");
ColumnTypesList columns = new ColumnTypesList("SpeciesTable",
"SpeciesColumns", "Select the columns for species name", false);
addStringInput("OutputTableName", "The name of the output table",
"occ_");
addStringInput("ErrorTableName", "The name of the output table", "err_");
ServiceType randomstring = new ServiceType(
ServiceParameters.RANDOMSTRING, "OutputTable", "", "occ");
ServiceType randomstringErr = new ServiceType(
ServiceParameters.RANDOMSTRING, "ErrorTable", "", "err");
AnalysisLogger.getLogger().debug("before setDynamicParameter() ");
if(!call)
setDynamicParameter();
// try {
//// if (justcall == 0) {
//// justcall = 1;
//
//// }
// } catch (Throwable e) {
// e.printStackTrace();
// AnalysisLogger.getLogger().debug(e.toString());
// }
addEnumerateInput(DataProvidersType.values(), dataProvider,
"Choose Data Providere", "ALL");
AnalysisLogger.getLogger().debug("Dopo DataProvidersType");
addEnumerateInput(ExtentionDPType.values(),dataProviderExtention ,
"Choose Expand Option Data Providere","ALL");
AnalysisLogger.getLogger().debug("Dopo ExtentionDPType");
addEnumerateInput(UnfoldDPEnumType.values(), dataProviderUnfold,
"Choose Unfold Option Data Providere","ALL");
AnalysisLogger.getLogger().debug("Dopo UnfoldDPEnumType");
inputs.add(tinput);
inputs.add(columns);
inputs.add(randomstring);
inputs.add(randomstringErr);
DatabaseType.addDefaultDBPars(inputs);
}catch (Throwable e) {
e.printStackTrace();
AnalysisLogger.getLogger().debug(e.toString());
}
call=true;
}
@Override
public StatisticalType getOutput() {
List<TableTemplates> outtemplate = new ArrayList<TableTemplates>();
outtemplate.add(TableTemplates.GENERIC);
List<TableTemplates> outtemplateErr = new ArrayList<TableTemplates>();
outtemplateErr.add(TableTemplates.GENERIC);
		OutputTable outErr = new OutputTable(outtemplate, outputErrortablename,
				tableError, "The output table containing the processing errors");
		PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
				fileName), PrimitiveTypes.FILE, "OccFile", "OccFile");
		map.put("Output", f);
		map.put("Errors", outErr);
		PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map,
				PrimitiveTypes.MAP, "ResultsMap", "Results Map");
		return output;
	}
public void write(String writeSt) {
try {
out.write(writeSt);
		} catch (IOException e) {
			e.printStackTrace();
		}
}
enum DataProvidersType {
}
class DataPenum extends DynamicEnum {
public Field[] getFields() {
Field[] fields = DataProvidersType.class.getDeclaredFields();
return fields;
}
}
enum ExtentionDPType {
}
class ExtentionDPEnum extends DynamicEnum {
public Field[] getFields() {
Field[] fields = ExtentionDPType.class.getDeclaredFields();
return fields;
}
}
enum UnfoldDPEnumType {
}
class UnfoldDPEnum extends DynamicEnum {
public Field[] getFields() {
Field[] fields = UnfoldDPEnumType.class.getDeclaredFields();
return fields;
}
}
}
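Both OccurencesProcedure and TaxaProcedure split the input species list into three chunks, one per worker thread, using the fixed-size logic shown above. A minimal sketch of the same split generalized to N chunks (ChunkDemo and partition are illustrative names, not part of the committed code):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Illustrative only: an N-way version of the three-chunk split used in
// OccurencesProcedure.process() and TaxaProcedure.process().
public class ChunkDemo {
	static List<List<String>> partition(List<String> items, int parts) {
		List<List<String>> chunks = new ArrayList<List<String>>();
		for (int p = 0; p < parts; p++)
			chunks.add(new ArrayList<String>());
		for (int i = 0; i < items.size(); i++)
			// Round-robin keeps the chunk sizes within one element of each other.
			chunks.get(i % parts).add(items.get(i));
		return chunks;
	}

	public static void main(String[] args) {
		List<String> species = Arrays.asList("a", "b", "c", "d", "e", "f", "g");
		System.out.println(partition(species, 3)); // [[a, d, g], [b, e], [c, f]]
	}
}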

View File

@@ -0,0 +1,160 @@
package org.gcube.dataanalysis.JobSMspd;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.manager;
import java.util.ArrayList;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.data.spd.client.proxies.Manager;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.streams.Stream;
public class ThreadExtractionOccFromSPD implements Runnable {
private ArrayList<String> chunk;
private ArrayList<ArrayList<String>> informations;
private ArrayList<String> errors;
private String dataProvider;
private String dataProviderUnfold;
private String dataProviderExpand;
public ThreadExtractionOccFromSPD(ArrayList<String> chunk, String dataProvider,String dataProviderExpand,String dataProviderUnfold ) {
this.chunk = chunk;
for (String species : chunk) {
System.out.println(species);
// AnalysisLogger.getLogger().debug(species);
}
this.dataProvider=dataProvider;
this.dataProviderExpand= dataProviderExpand;
this.dataProviderUnfold=dataProviderUnfold;
informations = new ArrayList<ArrayList<String>>();
errors= new ArrayList<String>();
}
public void run() {
ScopeProvider.instance.set("/gcube/devsec");
Manager manager=null;
try{
manager = manager().build();
for (String species : chunk) {
if (species != null) {
					String query = createQueryParameter(species);
					AnalysisLogger.getLogger().debug("QUERY *******: " + query);
Stream<ResultElement> stream;
try {
stream = manager.search(query);
int i=0;
while (stream.hasNext()) {
i++;
OccurrencePoint ti = (OccurrencePoint) stream.next();
informations.add(crateRowTable(ti));
}
if(i==0)
{
errors.add(species+" not found.");
}
} catch (Exception e) {
errors.add("Exception on "+species+" :"+ e.getMessage());
e.printStackTrace();
}
}
}
}catch(Throwable e){
e.printStackTrace();
AnalysisLogger.getLogger().debug("An error occurred: "+e.getMessage());
}
}
	private String createQueryParameter(String species) {
		String query = "SEARCH BY SN '" + species + "'";
		String where = "";
		String expand = "";
		String unfold = "";
		if (!dataProvider.equals("ALL"))
			where = " IN " + dataProvider;
		if (!dataProviderUnfold.equals("NO OPTION"))
			unfold = " UNFOLD WITH " + dataProviderUnfold;
		query = query + unfold;
		// Fixed: the original if-chain overwrote the plain EXPAND chosen for
		// "ALL" and used & instead of &&; the three cases are exclusive.
		if (dataProviderExpand.equals("ALL"))
			expand = " EXPAND";
		else if (!dataProviderExpand.equals("NO OPTION"))
			expand = " EXPAND WITH " + dataProviderExpand;
		query = query + expand;
		if (!expand.equals("") && !dataProviderExpand.equals("NO OPTION"))
			query = query + where;
		query = query + " RETURN occurrence";
		return query;
	}
private ArrayList<String> crateRowTable(OccurrencePoint p)
{
ArrayList<String> infoOcc= new ArrayList<String>();
infoOcc.add(p.getScientificName());
infoOcc.add(p.getAuthor());
infoOcc.add(p.getCatalogueNumber());
infoOcc.add(p.getCitation());
infoOcc.add(p.getCollectionCode());
infoOcc.add(p.getCoordinateUncertaintyInMeters());
infoOcc.add(p.getCountry());
infoOcc.add(p.getCredits());
infoOcc.add(p.getFamily());
infoOcc.add(p.getId());
infoOcc.add(p.getInstitutionCode());
infoOcc.add(p.getKingdom());
infoOcc.add(p.getLocality());
infoOcc.add(p.getProvider());
infoOcc.add((Double.toString(p.getDecimalLatitude())));
infoOcc.add((Double.toString(p.getDecimalLongitude())));
infoOcc.add((Double.toString(p.getMaxDepth())));
infoOcc.add((Double.toString(p.getMinDepth())));
return infoOcc;
}
public ArrayList<ArrayList<String>> getInfo()
{
return informations;
}
public ArrayList<String> getErrors()
{
return errors;
}
}
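For reference, a few of the SPD query strings that createQueryParameter assembles, assuming 'Solea solea' as the species, OBIS as the occurrence provider and CatalogueOfLife for the expand/unfold options (illustrative values; CatalogueOfLife is the provider mentioned in the original comments):

	dataProvider=ALL, expand=NO OPTION, unfold=NO OPTION:
		SEARCH BY SN 'Solea solea' RETURN occurrence
	dataProvider=ALL, expand=CatalogueOfLife, unfold=NO OPTION:
		SEARCH BY SN 'Solea solea' EXPAND WITH CatalogueOfLife RETURN occurrence
	dataProvider=OBIS, expand=CatalogueOfLife, unfold=NO OPTION:
		SEARCH BY SN 'Solea solea' EXPAND WITH CatalogueOfLife IN OBIS RETURN occurrence
	dataProvider=ALL, expand=NO OPTION, unfold=CatalogueOfLife:
		SEARCH BY SN 'Solea solea' UNFOLD WITH CatalogueOfLife RETURN occurrence

Note that the provider filter (IN ...) is appended only when an expand provider is also selected; that mirrors the behavior of the method above.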

View File

@@ -0,0 +1,151 @@
package org.gcube.dataanalysis.JobSMspd;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.manager;
import java.util.ArrayList;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.data.spd.client.proxies.Manager;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.streams.Stream;
public class ThreadExtractionTaxaFromSPD implements Runnable {
private ArrayList<String> chunk;
private ArrayList<ArrayList<String>> informations;
private ArrayList<String> errors;
private String dataProvider;
private String dataProviderUnfold;
private String dataProviderExpand;
public ThreadExtractionTaxaFromSPD(ArrayList<String> chunk, String dataProvider,String dataProviderExpand,String dataProviderUnfold ) {
this.chunk = chunk;
for (String species : chunk) {
System.out.println(species);
// AnalysisLogger.getLogger().debug(species);
}
this.dataProvider=dataProvider;
this.dataProviderExpand= dataProviderExpand;
this.dataProviderUnfold=dataProviderUnfold;
informations = new ArrayList<ArrayList<String>>();
errors= new ArrayList<String>();
}
public void run() {
ScopeProvider.instance.set("/gcube/devsec");
Manager manager=null;
try{
manager = manager().build();
for (String species : chunk) {
if (species != null) {
					String query = createQueryParameter(species);
AnalysisLogger.getLogger().debug("QUERY *******: "+query);
Stream<ResultElement> stream;
try {
stream = manager.search(query);
int i=0;
while (stream.hasNext()) {
i++;
TaxonomyItem ti = (TaxonomyItem) stream.next();
informations.add(crateRowTable(ti));
}
if(i==0)
{
errors.add(species+" not found.");
}
} catch (Exception e) {
errors.add("Exception on "+species+" :"+ e.getMessage());
e.printStackTrace();
}
}
}
}catch(Throwable e){
e.printStackTrace();
AnalysisLogger.getLogger().debug("An error occurred: "+e.getMessage());
}
}
	private String createQueryParameter(String species) {
		String query = "SEARCH BY SN '" + species + "'";
		String where = "";
		String expand = "";
		String unfold = "";
		if (!dataProvider.equals("ALL"))
			where = " IN " + dataProvider;
		if (!dataProviderUnfold.equals("NO OPTION"))
			unfold = " UNFOLD WITH " + dataProviderUnfold;
		query = query + unfold;
		// Fixed: the original if-chain overwrote the plain EXPAND chosen for
		// "ALL" and used & instead of &&; the three cases are exclusive.
		if (dataProviderExpand.equals("ALL"))
			expand = " EXPAND";
		else if (!dataProviderExpand.equals("NO OPTION"))
			expand = " EXPAND WITH " + dataProviderExpand;
		query = query + expand;
		if (!expand.equals("") && !dataProviderExpand.equals("NO OPTION"))
			query = query + where;
		query = query + " RETURN TAXON";
		return query;
	}
private ArrayList<String> crateRowTable(TaxonomyItem p)
{
ArrayList<String> infoOcc= new ArrayList<String>();
infoOcc.add(p.getScientificName());
infoOcc.add(p.getAuthor());
infoOcc.add(p.getCitation());
infoOcc.add(p.getCredits());
infoOcc.add(p.getId());
infoOcc.add(p.getLsid());
infoOcc.add(p.getProvider());
infoOcc.add(p.getRank());
return infoOcc;
}
public ArrayList<ArrayList<String>> getInfo()
{
return informations;
}
public ArrayList<String> getErrors()
{
return errors;
}
}

View File

@@ -0,0 +1,186 @@
package org.gcube.dataanalysis.trendylyzeralgorithms;
import java.awt.Image;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.HistogramGraph;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.jfree.chart.JFreeChart;
import org.jfree.data.category.DefaultCategoryDataset;
import org.postgresql.Driver;
public class AbsoluteSpeciesBarChartsAlgorithm extends
StandardLocalExternalAlgorithm {
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
static String databaseName = "DatabaseName";
static String userParameterName = "DatabaseUserName";
static String passwordParameterName = "DatabasePassword";
static String urlParameterName = "DatabaseURL";
protected String fileName;
BufferedWriter out;
private String firstSpeciesNumber = " SpeciesNumber :";
private String yearStart = "Start year :";
private String yearEnd = "End year :";
private int speciesNumber;
String databaseJdbc;
String year_start;
String year_end;
String databaseUser;
String databasePwd;
private Connection connection = null;
private DefaultCategoryDataset defaultcategorydataset;
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug("Initialization");
}
@Override
public String getDescription() {
return "An algorithm producing a bar chart for the most observed species in a certain years range (with respect to the OBIS database)";
}
public void fulfilParameters() {
String tmp = getInputParameter(firstSpeciesNumber);
speciesNumber = Integer.parseInt(tmp);
databaseJdbc = getInputParameter(urlParameterName);
year_start = getInputParameter(yearStart);
year_end = getInputParameter(yearEnd);
databaseUser = getInputParameter(userParameterName);
databasePwd = getInputParameter(passwordParameterName);
fileName = super.config.getPersistencePath() + "results.csv";
}
private ResultSet performeQuery() throws SQLException {
connection = DriverManager.getConnection(databaseJdbc, databaseUser,
databasePwd);
Statement stmt = connection.createStatement();
String query = "SELECT tname, sum(count)AS count FROM public.count_species_per_year WHERE year::integer >="
+ year_start
+ "AND year::integer <="
+ year_end
+ "GROUP BY tname ORDER BY count desc;";
return stmt.executeQuery(query);
}
@Override
protected void process() throws Exception {
defaultcategorydataset = new DefaultCategoryDataset();
		// Loading the class is enough to register the PostgreSQL JDBC driver.
		Class.forName("org.postgresql.Driver");
fulfilParameters();
out = new BufferedWriter(new FileWriter(fileName));
ResultSet rs = performeQuery();
int i = 0;
String s = "Species";
while (rs.next() && i < speciesNumber) {
String tname = rs.getString("tname");
String count = rs.getString("count");
write(tname+","+count);
int countOcc = Integer.parseInt(count);
PrimitiveType val = new PrimitiveType(String.class.getName(),
count, PrimitiveTypes.STRING, tname, tname);
if(i<100)
map.put(tname, val);
if (i < 16)
defaultcategorydataset.addValue(countOcc, s, tname);
i++;
}
out.close();
connection.close();
}
@Override
protected void setInputParameters() {
		addStringInput(
				firstSpeciesNumber,
				"Number of species to report (at most 16 will be visualized on the chart)",
				"10");
addStringInput(yearStart, "Starting year of the analysis", "1800");
addStringInput(yearEnd, "Ending year of the analysis", "2020");
addRemoteDatabaseInput("Obis2Repository", urlParameterName,
userParameterName, passwordParameterName, "driver", "dialect");
}
@Override
public void shutdown() {
AnalysisLogger.getLogger().debug("Shutdown");
}
@Override
public StatisticalType getOutput() {
		PrimitiveType p = new PrimitiveType(Map.class.getName(),
				PrimitiveType.stringMap2StatisticalMap(outputParameters),
				PrimitiveTypes.MAP, "Species Counts", "");
		AnalysisLogger
				.getLogger()
				.debug("AbsoluteSpeciesBarChartsAlgorithm: producing the bar chart for the most observed species");
		// build image:
		HashMap<String, Image> producedImages = new HashMap<String, Image>();
		JFreeChart chart = HistogramGraph
				.createStaticChart(defaultcategorydataset);
		Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
		producedImages.put("Species Observations", image);
		PrimitiveType images = new PrimitiveType(HashMap.class.getName(),
				producedImages, PrimitiveTypes.IMAGES, "Species Observations",
				"Graphical representation of the most observed species");
		PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
				fileName), PrimitiveTypes.FILE, "OccFile", "OccFile");
// end build image
AnalysisLogger.getLogger().debug(
"Bar Charts Species Occurrences Produced");
// collect all the outputs
map.put("File", f);
map.put("Result", p);
map.put("Images", images);
// generate a primitive type for the collection
PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map,
PrimitiveTypes.MAP, "ResultsMap", "Results Map");
return output;
}
public void write(String writeSt) {
try {
out.write(writeSt);
out.newLine();
		} catch (IOException e) {
			e.printStackTrace();
		}
}
}
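performeQuery above builds its SQL by concatenating the year inputs directly into the statement, which works only while the inputs are plain numbers. A hedged alternative sketch using a parameterized query against the same public.count_species_per_year table (CountSpeciesQuery and countPerSpecies are illustrative names, not the committed implementation):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

// Illustrative alternative to performeQuery(): bind the year range as integer
// parameters instead of concatenating user input into the SQL string.
public class CountSpeciesQuery {
	static ResultSet countPerSpecies(Connection connection, int yearStart,
			int yearEnd) throws SQLException {
		String sql = "SELECT tname, sum(count) AS count "
				+ "FROM public.count_species_per_year "
				+ "WHERE year::integer >= ? AND year::integer <= ? "
				+ "GROUP BY tname ORDER BY count DESC";
		PreparedStatement stmt = connection.prepareStatement(sql);
		stmt.setInt(1, yearStart);
		stmt.setInt(2, yearEnd);
		// The caller is responsible for closing the statement and connection.
		return stmt.executeQuery();
	}
}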

View File

@@ -0,0 +1,6 @@
package org.gcube.dataanalysis.trendylyzeralgorithms;
public enum AreaEnum {
LME,
MEOW
}

View File

@@ -0,0 +1,137 @@
package org.gcube.dataanalysis.trendylyzeralgorithms;
import java.awt.Image;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.HistogramGraph;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.jfree.chart.JFreeChart;
import org.jfree.data.category.DefaultCategoryDataset;
import org.postgresql.Driver;
public class SimpleAlgorithm extends StandardLocalExternalAlgorithm{
//case of db used
static String urlParameterName = "DatabaseURL";
static String userParameterName = "DatabaseUserName";
static String passwordParameterName = "DatabasePassword";
Class driverClass ;
Driver driver ;
Connection connection = null;
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
private DefaultCategoryDataset defaultcategorydataset;
private String species="Species";
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug("Initialization");
String driverName = "org.postgresql.Driver";
driverClass = Class.forName(driverName);
driver = (Driver) driverClass.newInstance();
}
@Override
	public String getDescription() {
		return "An algorithm producing a chart of the observations of a selected species (with respect to the OBIS database)";
	}
@Override
	protected void process() throws Exception {
		defaultcategorydataset = new DefaultCategoryDataset();
		String userSelectedSp = getInputParameter(species);
		String databaseJdbc = getInputParameter(urlParameterName);
		String databaseUser = getInputParameter(userParameterName);
		String databasePwd = getInputParameter(passwordParameterName);
		connection = DriverManager.getConnection(databaseJdbc, databaseUser,
				databasePwd);
		Statement stmt = connection.createStatement();
		// Fixed: initialize the dataset and filter on the species chosen by
		// the user (quoted), not on the literal name of the input parameter.
		String query = "SELECT numberOfObservation FROM public.count_species_per_year WHERE tname = '"
				+ userSelectedSp.replace("'", "''") + "'";
		ResultSet rs = stmt.executeQuery(query);
int i =0;
String s = "Species";
while (rs.next()) {
String count = rs.getString("numberOfObservation");
int countOcc=Integer.parseInt(count);
PrimitiveType val = new PrimitiveType(String.class.getName(), count, PrimitiveTypes.STRING, species, species);
map.put(species, val);
if(i<16)
defaultcategorydataset.addValue(countOcc,s,species);
else
break;
i++;
}
connection.close();
}
@Override
protected void setInputParameters() {
		addStringInput(species, "Selected species", "Solea solea");
addRemoteDatabaseInput("Obis2Repository", urlParameterName,
userParameterName, passwordParameterName, "driver", "dialect");
}
@Override
public void shutdown() {
AnalysisLogger.getLogger().debug("Shutdown");
try {
connection.close();
		} catch (SQLException e) {
			e.printStackTrace();
		}
}
@Override
public StatisticalType getOutput() {
		PrimitiveType p = new PrimitiveType(Map.class.getName(), PrimitiveType.stringMap2StatisticalMap(outputParameters), PrimitiveTypes.MAP, "Species Counts", "");
		AnalysisLogger.getLogger().debug("SimpleAlgorithm: producing the species observations chart");
//build image:
HashMap<String, Image> producedImages = new HashMap<String, Image>();
JFreeChart chart = HistogramGraph.createStaticChart(defaultcategorydataset);
Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
producedImages.put("Species Observations", image);
PrimitiveType images = new PrimitiveType(HashMap.class.getName(), producedImages, PrimitiveTypes.IMAGES, "ErrorRepresentation", "Graphical representation of the error spread");
//end build image
AnalysisLogger.getLogger().debug("Bar Charts Species Occurrences Produced");
//collect all the outputs
map.put("Result", p);
map.put("Images", images);
//generate a primitive type for the collection
PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
return output;
}
}

View File

@@ -0,0 +1,195 @@
package org.gcube.dataanalysis.trendylyzeralgorithms;
import java.awt.Image;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.HistogramGraph;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.jfree.chart.JFreeChart;
import org.jfree.data.category.DefaultCategoryDataset;
import org.postgresql.Driver;
public class SpeciesObservationsAreaBarChart extends
StandardLocalExternalAlgorithm {
static String databaseName = "DatabaseName";
static String userParameterName = "DatabaseUserName";
static String passwordParameterName = "DatabasePassword";
static String urlParameterName = "DatabaseURL";
private String species = "Species :";
private String yearStart = "Start year :";
private String yearEnd = "End year :";
private String area = "Area :";
private String selectedSpecies;
private DefaultCategoryDataset defaultcategorydataset;
private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
String databaseJdbc;
String year_start;
String year_end ;
String databaseUser ;
String chosenArea ;
String databasePwd ;
String table ;
String areaName;
Connection connection = null;
protected String fileName;
BufferedWriter out;
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug(
"SpeciesObservationsAreaBarChart Initialization");
}
@Override
public String getDescription() {
return "An algorithm producing a bar chart for the distribution of a species along a certain type of marine area (e.g. LME or MEOW)";
}
public void fulfilParameters() throws IOException {
databaseJdbc = getInputParameter(urlParameterName);
year_start = getInputParameter(yearStart);
year_end = getInputParameter(yearEnd);
selectedSpecies = getInputParameter(species);
databaseUser = getInputParameter(userParameterName);
chosenArea = getInputParameter(area);
databasePwd = getInputParameter(passwordParameterName);
table = "count_species_per_lme_per_year";
areaName = "lme_name";
if (chosenArea.equals("LME")) {
table = "count_species_per_lme_per_year";
areaName = "lme_name";
} else if (chosenArea.equals("MEOW")) {
table = "count_species_per_meow_per_year";
areaName = "ecoregion";
}
fileName = super.config.getPersistencePath() + "results.csv";
out = new BufferedWriter(new FileWriter(fileName));
}
private ResultSet performeQuery() throws SQLException {
connection = DriverManager.getConnection(databaseJdbc, databaseUser,
databasePwd);
Statement stmt = connection.createStatement();
String query = "SELECT tname," + areaName
+ " ,sum(count) AS count FROM " + table + " WHERE upper(tname) like upper('"
+ selectedSpecies + "') AND year::integer >=" + year_start
+ "AND year::integer <=" + year_end
+ "GROUP BY tname ,"+areaName+" ORDER BY count desc;";
return stmt.executeQuery(query);
}
@Override
protected void process() throws Exception {
defaultcategorydataset = new DefaultCategoryDataset();
		// Loading the class is enough to register the PostgreSQL JDBC driver.
		Class.forName("org.postgresql.Driver");
fulfilParameters() ;
ResultSet rs = performeQuery() ;
String s = selectedSpecies;
while (rs.next()) {
String ar = rs.getString(areaName);
String count = rs.getString("count");
PrimitiveType val = new PrimitiveType(String.class.getName(), count, PrimitiveTypes.STRING, ar, ar);
write(ar+","+count);
map.put(ar, val);
int countOcc = Integer.parseInt(count);
defaultcategorydataset.addValue(countOcc, s, ar);
}
connection.close();
out.close();
}
@Override
protected void setInputParameters() {
addStringInput(species, "The species to analyze", "");
addEnumerateInput(AreaEnum.values(), area, "Choose the area type",
AreaEnum.LME.name());
addStringInput(yearStart, "Starting year of the analysis", "1800");
		addStringInput(yearEnd, "Ending year of the analysis", "2020");
addRemoteDatabaseInput("Obis2Repository", urlParameterName,
userParameterName, passwordParameterName, "driver", "dialect");
}
@Override
public void shutdown() {
AnalysisLogger.getLogger().debug("Shutdown");
}
@Override
public StatisticalType getOutput() {
PrimitiveType p = new PrimitiveType(Map.class.getName(),
PrimitiveType.stringMap2StatisticalMap(outputParameters),
PrimitiveTypes.MAP, "", "");
AnalysisLogger.getLogger().debug(
"SpeciesObservationsAreaBarChart: producing bar chart for the species observations");
// build image:
HashMap<String, Image> producedImages = new HashMap<String, Image>();
JFreeChart chart = HistogramGraph
.createStaticChart(defaultcategorydataset);
Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
producedImages.put("Most observed species", image);
PrimitiveType images = new PrimitiveType(HashMap.class.getName(),
producedImages, PrimitiveTypes.IMAGES, "Most observed species",
"Most observed species");
// end build image
AnalysisLogger.getLogger().debug(
"Bar Charts Species Occurrences Produced");
// collect all the outputs
PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
fileName), PrimitiveTypes.FILE, "Species observations per area", "ObsFile");
map.put("Output",f);
map.put("Result", p);
map.put("Images", images);
// generate a primitive type for the collection
PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map,
PrimitiveTypes.MAP, "ResultsMap", "Results Map");
return output;
}
public void write(String writeSt) {
try {
out.write(writeSt);
out.newLine();
} catch (IOException e) {
// a failed write should not abort the algorithm: log and continue
e.printStackTrace();
}
}
}

View File

@ -0,0 +1,263 @@
package org.gcube.dataanalysis.trendylyzeralgorithms;
import java.awt.Image;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.LinkedHashMap;
import java.util.Map;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.TimeSeriesGraph;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.dataanalysis.ecoengine.utils.DynamicEnum;
import org.jfree.chart.JFreeChart;
import org.jfree.data.time.TimeSeries;
import org.jfree.data.time.TimeSeriesCollection;
import org.jfree.data.time.Year;
public class SpeciesObservationsPerLMEAreaPerYearLineChart extends StandardLocalExternalAlgorithm {
static String databaseName = "DatabaseName";
static String userParameterName = "DatabaseUserName";
static String passwordParameterName = "DatabasePassword";
static String urlParameterName = "DatabaseURL";
private String yearStart = "Start Year";
private String yearEnd = "End Year";
private Hashtable areaTable= new Hashtable<String , String>();
private static String area = "AreaType";
private String choseArea = "Area Name:";
private String selectedAreaName;
static int justcall = 0; // guards the one-time population of the LME enum
private String[] speciesNames;
LMEenum enuArea=new LMEenum();
private TimeSeriesCollection dataset;
protected String fileName;
BufferedWriter out;
Connection connection = null;
String year_start;
String year_end ;
String table = "count_species_per_lme_per_year";
String areaName = "lme_name";
String databaseJdbc=new String() ;
String databaseUser=new String() ;
String databasePwd =new String();
@Override
protected void setInputParameters() {
addRemoteDatabaseInput("Obis2Repository", urlParameterName,
userParameterName, passwordParameterName, "driver", "dialect");
if (justcall == 0) {
justcall = 1;
try {
queryArea(getStaticConnection());
} catch (Exception e) {
e.printStackTrace();
}
}
addEnumerateInput(LMEenumType.values(), area, "Choose the area name",
Util.formatAreaName("NORTH SEA"));
//addStringInput(choseArea,"Choose the area name","");
addStringInput(yearStart, "Starting year of the analysis", "1800");
addStringInput(yearEnd, "Ending year of the analysis", "2020");
PrimitiveTypesList speciesSelected = new PrimitiveTypesList(String.class.getName(),PrimitiveTypes.STRING, "Selected species", "List of the species to analyze", false);
super.inputs.add(speciesSelected);
}
// TODO: replace the hard-wired connection below with one obtained from the infrastructure
public void queryArea(Connection connection) throws SQLException
{
AnalysisLogger.getLogger().debug("call queryArea");
String query= "select distinct(upper(lme_name)) as lme_name from geo.lme order by lme_name";
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery(query);
while (rs.next()) {
String area=rs.getString("lme_name");
AnalysisLogger.getLogger().debug(area);
//areaTable.put(Util.formatAreaName(area), area);
enuArea.addEnum(LMEenumType.class, area);
}
connection.close();
}
public Connection getStaticConnection() throws SQLException, ClassNotFoundException, InstantiationException, IllegalAccessException
{
Class.forName("org.postgresql.Driver");
Connection connection = null;
connection = DriverManager.getConnection(
"jdbc:postgresql://obis2.i-marine.research-infrastructures.eu/obis","postgres", "0b1s@d4sc13nc3");
return connection;
}
@Override
public void shutdown() {
// nothing to release: the connection is closed at the end of process()
}
@Override
public StatisticalType getOutput() {
PrimitiveType p = new PrimitiveType(Map.class.getName(),
PrimitiveType.stringMap2StatisticalMap(outputParameters),
PrimitiveTypes.MAP, "", "");
AnalysisLogger
.getLogger()
.debug("SpeciesObservationsPerLMEAreaPerYearLineChart: producing line chart");
// build image:
HashMap<String, Image> producedImages = new HashMap<String, Image>();
JFreeChart chart = TimeSeriesGraph.createStaticChart(dataset, "yyyy");
Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
producedImages.put("Selected species observations per LME area("+ selectedAreaName+")", image);
PrimitiveType images = new PrimitiveType(HashMap.class.getName(),
producedImages, PrimitiveTypes.IMAGES, "ErrorRepresentation",
"Selected species observations per LME area ("+ selectedAreaName+")");
// end build image
AnalysisLogger.getLogger().debug(
"Line chart of species occurrences produced");
// collect all the outputs
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
fileName), PrimitiveTypes.FILE, "Species observations per area", "ObsFile");
map.put("Output",f);
map.put("Result", p);
map.put("Images", images);
// generate a primitive type for the collection
PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map,
PrimitiveTypes.MAP, "ResultsMap", "Results Map");
return output;
}
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug(
"Initialization SpeciesObservationsPerLMEAreaPerYearLineChart");
}
@Override
public String getDescription() {
return "Algorithm returning most observed species in a specific years range (data collected from OBIS database).";
}
public void fulfilParameters() throws IOException {
dataset = new TimeSeriesCollection();
year_start = getInputParameter(yearStart);
year_end = getInputParameter(yearEnd);
table = "count_species_per_lme_per_year";
areaName = "lme_name";
selectedAreaName=getInputParameter(area);
AnalysisLogger.getLogger().debug("*********NAMEE*******"+selectedAreaName);
databaseJdbc = getInputParameter(urlParameterName);
databaseUser = getInputParameter(userParameterName);
databasePwd = getInputParameter(passwordParameterName);
fileName = super.config.getPersistencePath() + "results.csv";
out = new BufferedWriter(new FileWriter(fileName));
}
@Override
protected void process() throws Exception {
AnalysisLogger.getLogger().debug("Starto to process");
String driverName = "org.postgresql.Driver";
Class driverClass = Class.forName(driverName);
Driver driver = (Driver) driverClass.newInstance();
fulfilParameters();
connection = DriverManager.getConnection(databaseJdbc, databaseUser,
databasePwd);
Statement stmt = connection.createStatement();
speciesNames = config.getParam("Selected species").split(AlgorithmConfiguration.getListSeparator());
for (String spe : speciesNames) {
String query = "select lme_name,year,count from "+table+" where upper(tname) like upper('"+
spe+ "') and upper(lme_name) like '"+selectedAreaName+"' and year::integer >="
+ year_start
+ "AND year::integer <="
+ year_end +" order by year;";
AnalysisLogger.getLogger().debug(query);
ResultSet rs = stmt.executeQuery(query);
final TimeSeries series = new TimeSeries(spe);
while (rs.next()) {
if (rs.getString("year") != null) {
AnalysisLogger.getLogger().debug(rs.getString("year")+" count "+ rs.getString("count"));
int year = Integer.parseInt(rs.getString("year"));
int count = Integer.parseInt(rs.getString("count"));
write(spe+","+year+","+count);
series.add(new Year(year), count);
}
}
dataset.addSeries(series);
}
AnalysisLogger.getLogger().debug(dataset.toString());
connection.close();
out.close();
}
enum LMEenumType {}
class LMEenum extends DynamicEnum{
public Field[] getFields() {
Field[] fields = LMEenumType.class.getDeclaredFields();
return fields;
}
}
public void write(String writeSt) {
try {
out.write(writeSt);
out.newLine();
} catch (IOException e) {
// a failed write should not abort the algorithm: log and continue
e.printStackTrace();
}
}
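// A Java 6-compatible cleanup sketch (hypothetical helper, not part of the original code):
// process() leaks the connection and the writer when a query throws mid-loop, so the
// closes belong in a finally block rather than at the end of the happy path.
private void closeQuietly(Connection c, BufferedWriter w) {
if (c != null) {
try { c.close(); } catch (SQLException e) { /* best effort on cleanup */ }
}
if (w != null) {
try { w.close(); } catch (IOException e) { /* best effort on cleanup */ }
}
}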
}

View File

@ -0,0 +1,259 @@
package org.gcube.dataanalysis.trendylyzeralgorithms;
import java.awt.Image;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.LinkedHashMap;
import java.util.Map;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.TimeSeriesGraph;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.dataanalysis.ecoengine.utils.DynamicEnum;
import org.jfree.chart.JFreeChart;
import org.jfree.data.time.TimeSeries;
import org.jfree.data.time.TimeSeriesCollection;
import org.jfree.data.time.Year;
public class SpeciesObservationsPerMEOWAreaPerYearLineChart extends StandardLocalExternalAlgorithm {
static String databaseName = "DatabaseName";
static String userParameterName = "DatabaseUserName";
static String passwordParameterName = "DatabasePassword";
static String urlParameterName = "DatabaseURL";
private String yearStart = "Start Year";
private String yearEnd = "End Year";
private Hashtable areaTable= new Hashtable<String , String>();
private static String area = "AreaType";
private String choseArea = "Area Name:";
private String selectedAreaName;
static int justcall = 0; // guards the one-time population of the MEOW enum
private String[] speciesNames;
MEOWenum enuArea=new MEOWenum();
private TimeSeriesCollection dataset;
protected String fileName;
BufferedWriter out;
Connection connection = null;
String year_start;
String year_end;
String table = "count_species_per_meow_per_year";
String areaName = "ecoregion";
String databaseJdbc;
String databaseUser;
String databasePwd;
@Override
protected void setInputParameters() {
addRemoteDatabaseInput("Obis2Repository", urlParameterName,
userParameterName, passwordParameterName, "driver", "dialect");
if (justcall == 0) {
justcall = 1;
try {
queryArea(getStaticConnection());
} catch (Exception e) {
e.printStackTrace();
}
}
addEnumerateInput(MEOWEnumType.values(), area, "Choose the area name",
Util.formatAreaName("AGULHAS BANK"));
//addStringInput(choseArea,"Choose the area name","");
addStringInput(yearStart, "Starting year of the analysis", "1800");
addStringInput(yearEnd, "Ending year of the analysis", "2020");
PrimitiveTypesList speciesSelected = new PrimitiveTypesList(String.class.getName(),PrimitiveTypes.STRING, "Selected species", "List of the species to analyze", false);
super.inputs.add(speciesSelected);
}
// TODO: replace the hard-wired connection below with one obtained from the infrastructure
public void queryArea(Connection connection) throws SQLException
{
AnalysisLogger.getLogger().debug("call queryArea");
String query= "select upper(ecoregion)as ecoregion from geo.meow order by ecoregion";
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery(query);
while (rs.next()) {
String area=rs.getString("ecoregion");
AnalysisLogger.getLogger().debug(area);
//areaTable.put(Util.formatAreaName(area), area);
enuArea.addEnum(MEOWEnumType.class, area);
}
connection.close();
}
public Connection getStaticConnection() throws SQLException, ClassNotFoundException, InstantiationException, IllegalAccessException
{
Class.forName("org.postgresql.Driver");
Connection connection = null;
connection = DriverManager.getConnection(
"jdbc:postgresql://obis2.i-marine.research-infrastructures.eu/obis","postgres", "0b1s@d4sc13nc3");
return connection;
}
@Override
public void shutdown() {
// nothing to release: the connection is closed at the end of process()
}
@Override
public StatisticalType getOutput() {
PrimitiveType p = new PrimitiveType(Map.class.getName(),
PrimitiveType.stringMap2StatisticalMap(outputParameters),
PrimitiveTypes.MAP, "", "");
AnalysisLogger
.getLogger()
.debug("SpeciesObservationsPerMEOWAreaPerYearLineChart: producing line chart");
// build image:
HashMap<String, Image> producedImages = new HashMap<String, Image>();
JFreeChart chart = TimeSeriesGraph.createStaticChart(dataset, "yyyy");
Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
producedImages.put("Selected species observations per MEOW area ("+ selectedAreaName+")", image);
PrimitiveType images = new PrimitiveType(HashMap.class.getName(),
producedImages, PrimitiveTypes.IMAGES, "ErrorRepresentation",
"Selected species observations per MEOW area("+ selectedAreaName+")");
// end build image
AnalysisLogger.getLogger().debug(
"Line chart of species occurrences produced");
// collect all the outputs
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
fileName), PrimitiveTypes.FILE, "Species observations per area", "ObsFile");
map.put("Output",f);
map.put("Result", p);
map.put("Images", images);
// generate a primitive type for the collection
PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map,
PrimitiveTypes.MAP, "ResultsMap", "Results Map");
return output;
}
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug(
"Initialization SpeciesObservationsPerMEOWAreaPerYearLineChart");
}
@Override
public String getDescription() {
return "Algorithm returning most observed species in a specific years range (data collected from OBIS database).";
}
public void fulfilParameters() throws Exception {
String driverName = "org.postgresql.Driver";
Class driverClass = Class.forName(driverName);
Driver driver = (Driver) driverClass.newInstance();
dataset = new TimeSeriesCollection();
year_start = getInputParameter(yearStart);
year_end = getInputParameter(yearEnd);
selectedAreaName=getInputParameter(area);
AnalysisLogger.getLogger().debug("*********NAMEE*******"+selectedAreaName);
databaseJdbc = getInputParameter(urlParameterName);
databaseUser = getInputParameter(userParameterName);
databasePwd = getInputParameter(passwordParameterName);
connection = DriverManager.getConnection(databaseJdbc, databaseUser,
databasePwd);
fileName = super.config.getPersistencePath() + "results.csv";
out = new BufferedWriter(new FileWriter(fileName));
}
@Override
protected void process() throws Exception {
AnalysisLogger.getLogger().debug("Start to process");
fulfilParameters();
Statement stmt = connection.createStatement();
speciesNames = config.getParam("Selected species").split(AlgorithmConfiguration.getListSeparator());
for (String spe : speciesNames) {
String query = "select ecoregion ,year,count from "+table+" where upper(tname) like upper('"+
spe+ "') and upper(ecoregion) like '"+selectedAreaName+"' and year::integer >="
+ year_start
+ "AND year::integer <="
+ year_end +" order by year;";
AnalysisLogger.getLogger().debug(query);
ResultSet rs = stmt.executeQuery(query);
final TimeSeries series = new TimeSeries(spe);
while (rs.next()) {
if (rs.getString("year") != null) {
AnalysisLogger.getLogger().debug(rs.getString("year")+" count "+ rs.getString("count"));
int year = Integer.parseInt(rs.getString("year"));
int count = Integer.parseInt(rs.getString("count"));
write(spe+","+year+","+count);
series.add(new Year(year), count);
}
}
dataset.addSeries(series);
}
AnalysisLogger.getLogger().debug(dataset.toString());
connection.close();
out.close();
}
enum MEOWEnumType {}
class MEOWenum extends DynamicEnum{
public Field[] getFields() {
Field[] fields = MEOWEnumType.class.getDeclaredFields();
return fields;
}
}
public void write(String writeSt) {
try {
out.write(writeSt);
out.newLine();
} catch (IOException e) {
// a failed write should not abort the algorithm: log and continue
e.printStackTrace();
}
}
}
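// The two per-area line-chart algorithms above are identical except for the table, the area
// column, and the default area name. A sketch of a shared base class (hypothetical; assumes
// the same StandardLocalExternalAlgorithm superclass) that both could extend:
abstract class AbstractSpeciesPerAreaLineChart extends StandardLocalExternalAlgorithm {
/** @return e.g. "count_species_per_lme_per_year" or "count_species_per_meow_per_year" */
protected abstract String getTable();
/** @return e.g. "lme_name" or "ecoregion" */
protected abstract String getAreaColumn();
/** @return the area name proposed to the user by default */
protected abstract String getDefaultArea();
}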

View File

@ -0,0 +1,171 @@
package org.gcube.dataanalysis.trendylyzeralgorithms;
import java.awt.Image;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.TimeSeriesGraph;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.jfree.chart.JFreeChart;
import org.jfree.data.time.TimeSeries;
import org.jfree.data.time.TimeSeriesCollection;
import org.jfree.data.time.Year;
public class SpeciesObservationsPerYear extends StandardLocalExternalAlgorithm {
static String databaseName = "DatabaseName";
static String userParameterName = "DatabaseUserName";
static String passwordParameterName = "DatabasePassword";
static String urlParameterName = "DatabaseURL";
private String yearStart = "Start Year :";
private String yearEnd = "End Year :";
private String[] speciesNames;
private TimeSeriesCollection dataset;
protected String fileName;
BufferedWriter out;
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug(
"Initialization SpeciesObservationsPerYear");
}
@Override
public String getDescription() {
return "An algorithm producing the trend of the observations for a certain species in a certain years range.";
}
@Override
protected void process() throws Exception {
dataset = new TimeSeriesCollection();
String driverName = "org.postgresql.Driver";
AnalysisLogger.getLogger().debug("Inside process ");
fileName = super.config.getPersistencePath() + "results.csv";
out = new BufferedWriter(new FileWriter(fileName));
Class.forName(driverName); // registering the driver is enough; no instance is needed
String databaseJdbc = getInputParameter(urlParameterName);
String year_start = getInputParameter(yearStart);
String year_end = getInputParameter(yearEnd);
String databaseUser = getInputParameter(userParameterName);
String databasePwd = getInputParameter(passwordParameterName);
Connection connection = null;
connection = DriverManager.getConnection(databaseJdbc, databaseUser,
databasePwd);
Statement stmt = connection.createStatement();
speciesNames = config.getParam("Selected species").split(AlgorithmConfiguration.getListSeparator());
for (String sp : speciesNames) {
AnalysisLogger.getLogger().debug("Species: " + sp);
String query = "select tname,year,count from public.count_species_per_year where upper(tname) like upper('"
+ sp + "') and year::integer >"
+ year_start
+ "AND year::integer <"
+ year_end +" order by year;";
ResultSet rs = stmt.executeQuery(query);
final TimeSeries series = new TimeSeries(sp);
while (rs.next()) {
if (rs.getString("year") != null) {
int year = Integer.parseInt(rs.getString("year"));
int count = Integer.parseInt(rs.getString("count"));
out.write(sp+","+year+","+count);
out.newLine();
series.add(new Year(year), count);
}
}
dataset.addSeries(series);
}
AnalysisLogger.getLogger().debug(dataset.toString());
out.close();
connection.close();
}
@Override
public StatisticalType getOutput() {
PrimitiveType p = new PrimitiveType(Map.class.getName(),
PrimitiveType.stringMap2StatisticalMap(outputParameters),
PrimitiveTypes.MAP, " ", "");
AnalysisLogger
.getLogger()
.debug("SpeciesObservationsPerYear: producing line chart");
// build image:
HashMap<String, Image> producedImages = new HashMap<String, Image>();
JFreeChart chart = TimeSeriesGraph.createStaticChart(dataset, "yyyy");
Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
producedImages.put("Species observations per year", image);
PrimitiveType images = new PrimitiveType(HashMap.class.getName(),
producedImages, PrimitiveTypes.IMAGES, "ErrorRepresentation",
"Species observations per year");
// end build image
AnalysisLogger.getLogger().debug(
"Line Species Occurrences Produced");
// collect all the outputs
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
fileName), PrimitiveTypes.FILE, "Species observations per area", "ObsFile");
map.put("Output",f);
map.put("Result", p);
map.put("Images", images);
// generate a primitive type for the collection
PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map,
PrimitiveTypes.MAP, "ResultsMap", "Results Map");
return output;
}
@Override
protected void setInputParameters() {
addStringInput(yearStart, "Starting year of the analysis", "1800");
addStringInput(yearEnd, "Ending year of the analysis", "2020");
PrimitiveTypesList speciesSelected = new PrimitiveTypesList(String.class.getName(),PrimitiveTypes.STRING, "Selected species", "List of the species to analyze", false);
super.inputs.add(speciesSelected);
addRemoteDatabaseInput("Obis2Repository", urlParameterName,
userParameterName, passwordParameterName, "driver", "dialect");
}
@Override
public void shutdown() {
// nothing to release: the connection is closed at the end of process()
}
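// A small guard (a sketch, with a hypothetical helper name): since the year bounds are
// spliced into the SQL as raw strings, validating them as integers up front keeps
// malformed input from producing a broken query.
private static int parseYearOrDefault(String value, int fallback) {
try {
return Integer.parseInt(value.trim());
} catch (NumberFormatException e) {
return fallback; // e.g. 1800 for the start bound, 2020 for the end bound
}
}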
}

View File

@ -0,0 +1,9 @@
package org.gcube.dataanalysis.trendylyzeralgorithms;
public enum TaxaEnum {
GENUS,
FAMILY,
ORDER,
CLASS
}

View File

@ -0,0 +1,167 @@
package org.gcube.dataanalysis.trendylyzeralgorithms;
import java.awt.Image;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.HistogramGraph;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.jfree.chart.JFreeChart;
import org.jfree.data.category.DefaultCategoryDataset;
public class TaxaObservationsBarChartAlgorithm extends StandardLocalExternalAlgorithm{
static String databaseName = "DatabaseName";
static String userParameterName = "DatabaseUserName";
static String passwordParameterName = "DatabasePassword";
static String urlParameterName = "DatabaseURL";
private String yearStart = "Start Year :";
private String yearEnd = "End Year :";
private String taxa = "Level :";
String tax;
protected String fileName;
BufferedWriter out;
private String firstTaxaNumber = "TaxaNumber :";
private int taxaNumber;
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
private DefaultCategoryDataset dataset;
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug(
"Initialization TaxaObservationsBarChartAlgorithm");
}
@Override
public String getDescription() {
return "An algorithm producing a bar chart for the most observed taxa in a certain years range (with respect to the OBIS database)";
}
@Override
protected void process() throws Exception {
dataset = new DefaultCategoryDataset();
String driverName = "org.postgresql.Driver";
String tmp=getInputParameter(firstTaxaNumber);
taxaNumber = Integer.parseInt(tmp);
Class.forName(driverName); // registering the driver is enough; no instance is needed
String databaseJdbc = getInputParameter(urlParameterName);
String year_start = getInputParameter(yearStart);
String year_end = getInputParameter(yearEnd);
tax = getInputParameter(taxa);
String databaseUser = getInputParameter(userParameterName);
String databasePwd = getInputParameter(passwordParameterName);
fileName = super.config.getPersistencePath() + "results.csv";
out = new BufferedWriter(new FileWriter(fileName));
Connection connection = null;
connection = DriverManager.getConnection(databaseJdbc, databaseUser,
databasePwd);
String table="genus_table_per_year";
String column_name="genus";
if (tax.equals("LME")) {
table = "genus_table_per_year";
column_name = "genus";
} else if (tax.equals("CLASS")) {
table = "class_table_per_year";
column_name = "class";
} else if (tax.equals("FAMILY")) {
table = "family_table_per_year";
column_name = "family";
} else if (tax.equals("ORDER")) {
table = "order_table_per_year";
column_name = "order";
}
Statement stmt = connection.createStatement();
String query = "SELECT \""+column_name+"\", sum(count)AS count FROM public."+ table+" WHERE year::integer >= "
+ year_start
+ "AND year::integer <= "
+ year_end
+ " GROUP BY \""+ column_name+"\" ORDER BY count desc;";
ResultSet rs = stmt.executeQuery(query);
int i =0;
String s = column_name;
while (rs.next()&& i<taxaNumber) {
String tname = rs.getString(column_name);
String count = rs.getString("count");
out.write(column_name+","+count);
out.newLine();
if(i<100)
{PrimitiveType val = new PrimitiveType(String.class.getName(), count, PrimitiveTypes.STRING, tname, tname);
map.put(tname, val);}
int countOcc=Integer.parseInt(count);
if(i<16)
dataset.addValue(countOcc,s,tname);
i++;
}
connection.close();
out.close();
}
@Override
protected void setInputParameters() {
addStringInput(firstTaxaNumber, "Number of taxa to report", "10");
addEnumerateInput(TaxaEnum.values(), taxa, "Choose the taxonomy level",
TaxaEnum.GENUS.name());
addStringInput(yearStart, "Starting year of the analysis", "1800");
addStringInput(yearEnd, "Ending year of the analysis", "2020");
// addStringInput("Species", "The species", config.getParam("Species"));
addRemoteDatabaseInput("Obis2Repository", urlParameterName,
userParameterName, passwordParameterName, "driver", "dialect");
}
@Override
public void shutdown() {
// nothing to release: the connection is closed at the end of process()
}
@Override
public StatisticalType getOutput() {
PrimitiveType p = new PrimitiveType(Map.class.getName(), PrimitiveType.stringMap2StatisticalMap(outputParameters), PrimitiveTypes.MAP, "Discrepancy Analysis","");
AnalysisLogger.getLogger().debug("MapsComparator: Producing Gaussian Distribution for the errors");
//build image:
HashMap<String, Image> producedImages = new HashMap<String, Image>();
JFreeChart chart = HistogramGraph.createStaticChart(dataset);
Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
producedImages.put("Taxonomy observations per year ("+tax+")", image);
PrimitiveType images = new PrimitiveType(HashMap.class.getName(), producedImages, PrimitiveTypes.IMAGES, "ErrorRepresentation", "Graphical representation of the error spread");
//end build image
AnalysisLogger.getLogger().debug("Line Charts Species Occurrences Produced");
//collect all the outputs
PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
fileName), PrimitiveTypes.FILE, "Species observations per area", "ObsFile");
map.put("Output",f);
map.put("Result", p);
map.put("Images", images);
//generate a primitive type for the collection
PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
return output;
}
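// A sketch of the taxonomy-to-table mapping as a switch over TaxaEnum instead of string
// comparisons (assumes the enum constants defined in TaxaEnum; behavior-equivalent to the
// if/else chain in process()):
private static String tableFor(TaxaEnum level) {
switch (level) {
case CLASS:
return "class_table_per_year";
case FAMILY:
return "family_table_per_year";
case ORDER:
return "order_table_per_year";
case GENUS:
default:
return "genus_table_per_year";
}
}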
}

View File

@ -0,0 +1,198 @@
package org.gcube.dataanalysis.trendylyzeralgorithms;
import java.awt.Image;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.TimeSeriesGraph;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.jfree.chart.JFreeChart;
import org.jfree.data.time.TimeSeries;
import org.jfree.data.time.TimeSeriesCollection;
import org.jfree.data.time.Year;
public class TaxaObservationsPerYearLineChart
extends StandardLocalExternalAlgorithm {
static String databaseName = "DatabaseName";
static String userParameterName = "DatabaseUserName";
static String passwordParameterName = "DatabasePassword";
static String urlParameterName = "DatabaseURL";
private String yearStart = "Start year :";
private String yearEnd = "End year :";
private String taxa = "Level :";
private String tax;
private String[] taxaNames;
private TimeSeriesCollection dataset;
protected String fileName;
BufferedWriter out;
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug(
"Initialization TaxaObservationsPerYearLineChart");
}
@Override
public String getDescription() {
return "Algorithm returning most observations taxonomy trend in a specific years range (with respect to the OBIS database)";
}
@Override
protected void process() throws Exception {
String driverName = "org.postgresql.Driver";
dataset = new TimeSeriesCollection();
Class.forName(driverName); // registering the driver is enough; no instance is needed
String databaseJdbc = getInputParameter(urlParameterName);
String year_start = getInputParameter(yearStart);
String year_end = getInputParameter(yearEnd);
fileName = super.config.getPersistencePath() + "results.csv";
out = new BufferedWriter(new FileWriter(fileName));
tax = getInputParameter(taxa);
String table="genus_table_per_year";
String column_name="genus";
if (tax.equals("GENUS")) {
table = "genus_table_per_year";
column_name = "genus";
} else if (tax.equals("CLASS")) {
table = "class_table_per_year";
column_name = "class";
} else if (tax.equals("FAMILY")) {
table = "family_table_per_year";
column_name = "family";
} else if (tax.equals("ORDER")) {
table = "order_table_per_year";
column_name = "order";
}
// AnalysisLogger.getLogger().debug("Taxonomy found: " + taxonomy.size());
String databaseUser = getInputParameter(userParameterName);
String databasePwd = getInputParameter(passwordParameterName);
Connection connection = null;
connection = DriverManager.getConnection(databaseJdbc, databaseUser,
databasePwd);
Statement stmt = connection.createStatement();
taxaNames = config.getParam("Selected taxonomy").split(AlgorithmConfiguration.getListSeparator());
for (String tx : taxaNames) {
AnalysisLogger.getLogger().debug("Species: " + tx);
String query = "select \""+ column_name+"\",year,count from "+table+" where upper("+column_name+") like upper('"
+ tx + "') and year::integer >="
+ year_start
+ "AND year::integer <="
+ year_end +" order by year;";
AnalysisLogger.getLogger().debug(
query);
ResultSet rs = stmt.executeQuery(query);
final TimeSeries series = new TimeSeries(tx);
while (rs.next()) {
if (rs.getString("year") != null) {
int year = Integer.parseInt(rs.getString("year"));
int count = Integer.parseInt(rs.getString("count"));
AnalysisLogger.getLogger().debug("year: " + year+ " count : "+ count);
out.write(tx+","+year+","+count);
out.newLine();
series.add(new Year(year), count);
}
}
dataset.addSeries(series);
}
AnalysisLogger.getLogger().debug(dataset.toString());
out.close();
connection.close();
}
@Override
protected void setInputParameters() {
addEnumerateInput(TaxaEnum.values(), taxa, "Choose the taxonomy level",
TaxaEnum.GENUS.name());
addStringInput(yearStart, "Starting year of the analysis", "1800");
addStringInput(yearEnd, "Ending year of the analysis", "2020");
PrimitiveTypesList taxaSelected = new PrimitiveTypesList(String.class.getName(),PrimitiveTypes.STRING, "Selected taxonomy", "List of taxa to analyze", false);
super.inputs.add(taxaSelected);
// addStringInput("Species", "The species", config.getParam("Species"));
addRemoteDatabaseInput("Obis2Repository", urlParameterName,
userParameterName, passwordParameterName, "driver", "dialect");
}
@Override
public void shutdown() {
// nothing to release: the connection is closed at the end of process()
}
@Override
public StatisticalType getOutput() {
PrimitiveType p = new PrimitiveType(Map.class.getName(),
PrimitiveType.stringMap2StatisticalMap(outputParameters),
PrimitiveTypes.MAP, "Discrepancy Analysis", "");
AnalysisLogger
.getLogger()
.debug("TaxaObservationsPerYearLineChart: producing line chart");
// build image:
HashMap<String, Image> producedImages = new HashMap<String, Image>();
JFreeChart chart = TimeSeriesGraph.createStaticChart(dataset, "yyyy");
Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
producedImages.put("Taxa observations tends per year ("+tax+")", image);
PrimitiveType images = new PrimitiveType(HashMap.class.getName(),
producedImages, PrimitiveTypes.IMAGES, "ErrorRepresentation",
"Taxa observations per year ("+tax+")");
// end build image
AnalysisLogger.getLogger().debug(
"Line Taxonomy Occurrences Produced");
// collect all the outputs
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
fileName), PrimitiveTypes.FILE, "Species observations per area", "ObsFile");
map.put("Output",f);
map.put("Result", p);
map.put("Images", images);
// generate a primitive type for the collection
PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map,
PrimitiveTypes.MAP, "ResultsMap", "Results Map");
return output;
}
}

View File

@ -0,0 +1,63 @@
package org.gcube.dataanalysis.trendylyzeralgorithms;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Hashtable;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.utils.DynamicEnum;
import org.gcube.dataanalysis.trendylyzeralgorithms.Util;
enum MEOWEnumType {}
enum LMEenumType {}
class MEOWenum extends DynamicEnum {
public Field[] getFields() {
Field[] fields = MEOWEnumType.class.getDeclaredFields();
return fields;
}
}
class LMEenum extends DynamicEnum{
public Field[] getFields() {
Field[] fields = LMEenumType.class.getDeclaredFields();
return fields;
}
}
public class Test {
private static Hashtable<String, String> areaTable= new Hashtable<String , String>();
private static LMEenum enuArea=new LMEenum();
public static void main(String[] args) {
try {
Connection connection = DriverManager.getConnection(
"jdbc:postgresql://obis2.i-marine.research-infrastructures.eu/obis","postgres", "0b1s@d4sc13nc3");
AnalysisLogger.getLogger().debug("call queryArea");
String query= "select upper(lme_name)as lme_name from geo.lme";
Statement stmt;
stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery(query);
while (rs.next()) {
String area=rs.getString("lme_name");
String formatArea=Util.formatAreaName(area);
areaTable.put(formatArea, area);
enuArea.addEnum(LMEenumType.class, Util.formatAreaName(area));
LMEenumType selectEnumArea =LMEenumType.valueOf(Util.formatAreaName(area));
String selectedAreaName=(String) areaTable.get(selectEnumArea.toString());
System.out.print(selectedAreaName);
System.out.println();
}
connection.close();
}catch (Exception e) {
// a test utility: just print the failure
e.printStackTrace();
}
}
}

View File

@ -0,0 +1,15 @@
package org.gcube.dataanalysis.trendylyzeralgorithms;
public class Util {
public static String formatAreaName(String area) {
// replace characters that are not valid in Java enum constant names with underscores
return area.replace(".", "_")
.replace(" ", "_")
.replace("-", "_")
.replace(",", "_")
.replace("/", "_");
}
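// Usage example (hypothetical inputs): formatAreaName("GULF OF MEXICO") returns
// "GULF_OF_MEXICO", and formatAreaName("HUMBOLDT CURRENT - PERU/CHILE") returns
// "HUMBOLDT_CURRENT___PERU_CHILE".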
}