Gianpaolo Coro 2011-12-14 18:03:22 +00:00
parent aa9b7b2299
commit f203bbf2bb
78 changed files with 7440 additions and 0 deletions

.classpath Normal file

@@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/jre6"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/lexicalMatcher1.2.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/log4j-1.2.16.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/hibernate3.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/dom4j-1.6.1.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/jaxen-1.1.2.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/slf4j-log4j12-1.6.0.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/slf4j-api-1.6.0.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/c3p0-0.9.1.2.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/hibernate-spatial-postgis-1.0-M2.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/hibernate-spatial-1.0-M2.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/jta-1.1.jar"/>
<classpathentry kind="lib" path="C:/Users/coro/Desktop/WorkFolder/Workspace/StatisticalLibSupportLibraries/lib/jts-1.10.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/postgis-2.0.0SVN.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/commons-collections-3.1.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/commons-logging-1.0.4.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/postgresql-8.4-702.jdbc4.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/gson-1.7.1.jar"/>
<classpathentry kind="lib" path="/StatisticalLibSupportLibraries/lib/StatisticsExtractor.jar"/>
<classpathentry kind="output" path="bin"/>
</classpath>

.project Normal file

@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>EcologicalEngine</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

.settings/org.eclipse.jdt.core.prefs Normal file

@@ -0,0 +1,12 @@
#Tue Apr 12 14:20:07 CEST 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.source=1.6

CHANGELOG Normal file

@@ -0,0 +1,2 @@
v. 1.0.0 (20-04-2011)
* First release

INSTALL Normal file

@@ -0,0 +1 @@
Used as a Web service in the gCube Framework

LICENSE Normal file

@@ -0,0 +1,8 @@
gCube System - License
------------------------------------------------------------
The gCube/gCore software is licensed as Free Open Source software under the
EUPL (http://ec.europa.eu/idabc/eupl).
The software and documentation are provided by their authors/distributors "as is",
and no express or implied warranty is given for their use, quality or fitness
for a particular purpose.

MAINTAINERS Normal file

@@ -0,0 +1,2 @@
Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"

README Normal file

@@ -0,0 +1,42 @@
The gCube System - Ecological Modelling Library
------------------------------------------------------------
This work is partially funded by the European Commission in the
context of the D4Science project (www.d4science.eu), under the
1st call of FP7 IST priority.
Authors
-------
* Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"
Version and Release Date
------------------------
version 1.0.0 (20-04-2011)
Description
--------------------
Support library for statistical analysis of Time Series data.
Download information
--------------------
Source code is available from SVN:
http://svn.research-infrastructures.eu/d4science/gcube/trunk/content-management/EcologicalModelling
Binaries can be downloaded from:
http://software.d4science.research-infrastructures.eu/
Documentation
-------------
Ecological Modelling documentation is available on-line from the Projects Documentation Wiki:
not available
Licensing
---------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

build.xml Normal file

@@ -0,0 +1,173 @@
<?xml version="1.0"?>
<!-- Ant buildfile for Libraries
Version 0.1
INPUT PROPERTIES
This buildfile assumes that the following properties have or may have been defined in a "build.properties" file
in the configuration directory of the service (or else using the -D flag on the command line).
package: (mandatory) Root package of the service implementation: it should be the first package in the
implementation hierarchy which uniquely identifies the implementation.
lib.dir: (mandatory) directory with auxiliary dependencies.
etics.build: (optional) Set to 'true', it indicates that build structures and procedures
should be optimised for remote builds in ETICS. If it is omitted, this buildfile will optimise build structures and procedures
for local builds.
-->
<project default="deploy" name="Ecological Modelling Buildfile">
<!-- environment -->
<property environment="env" />
<!-- external environment -->
<echo message="container.dir ->${env.GLOBUS_LOCATION}" level="info"/>
<property name="container.dir" value="${env.GLOBUS_LOCATION}" />
<!-- load non-standard tasks -->
<taskdef resource="ise/antelope/tasks/antlib.xml">
<classpath>
<pathelement location="${container.dir}/lib/AntelopeTasks_3.4.2.jar"/>
</classpath>
</taskdef>
<!-- discriminate between local and remote build -->
<property name="etics.build" value="false" />
<!-- library-specific locations -->
<property name="library.dir" location="." />
<property name="etc.dir.name" value="etc" />
<property name="etc.dir" value="${library.dir}/${etc.dir.name}" />
<property name="source.dir" value="${library.dir}/src" />
<!-- load input properties -->
<property file="${etc.dir}/build.properties" />
<stringutil property="package.dir" string="${package}"><!-- derive package.dir from ${package} -->
<replace regex="\." replacement="/"/>
</stringutil>
<!-- file defaults -->
<property name="jarfile" value="${package}.jar" />
<if name="etics.build" value="true">
<property name="build.location" location="${library.dir}" />
<property name="lib.location" value="${build.location}/lib" />
<else>
<property name="build.location" location="${env.BUILD_LOCATION}" />
<property name="lib.location" location="${build.location}/${lib.dir}" />
</else>
</if>
<!-- temporary build locations -->
<property name="build.dir" location="${build.location}/build" />
<property name="build.classes.dir" location="${build.dir}/classes" />
<property name="build.lib.dir" location="${build.dir}/lib" />
<!-- misc defaults -->
<property name="java.debug" value="on" />
<!-- initialisation tasks -->
<target name="init" depends="clean" description="creates build structures">
<!-- input summary -->
<echo message="Root Package -> ${package}" level="info"/>
<echo message="Configuration -> ${etc.dir}" level="info"/>
<echo message="External dependencies -> ${lib.location}" level="info"/>
<!-- output summary -->
<echo message="Library Jar -> ${jarfile}" level="info"/>
<!-- create temporary build folders -->
<mkdir dir="${build.dir}" />
<mkdir dir="${build.classes.dir}" />
<mkdir dir="${build.lib.dir}" />
<!-- create dependency location, if it does not exist already -->
<mkdir dir="${lib.location}" />
</target>
<target name="build" depends="init" description="build the library">
<javac srcdir="${source.dir}" destdir="${build.classes.dir}" debug="${java.debug}" deprecation="${java.deprecation}" description="Compile the library">
<include name="**/*.java" />
<classpath>
<fileset dir="${container.dir}/lib">
<include name="*.jar" />
<exclude name="${jarfile}" />
</fileset>
<fileset dir="${lib.location}">
<include name="**/*.jar" />
<exclude name="**/${jarfile}" />
</fileset>
</classpath>
</javac>
</target>
<target name="jar" depends="build" description="jar the library">
<copy toDir="${build.classes.dir}/META-INF">
<fileset dir="${etc.dir}" casesensitive="yes" />
</copy>
<copy todir="${build.classes.dir}">
<fileset dir="${source.dir}">
<include name="org/**/*.xsd"/>
<include name="org/**/*.xml"/>
<include name="org/**/*.properties"/>
</fileset>
</copy>
<jar jarfile="${build.lib.dir}/${jarfile}" basedir="${build.classes.dir}"/>
</target>
<target name="jar-dev" depends="build" description="jar the library including the source code">
<copy toDir="${build.classes.dir}/META-INF">
<fileset dir="${etc.dir}" casesensitive="yes" />
</copy>
<copy todir="${build.classes.dir}">
<fileset dir="${source.dir}">
<include name="org/**/*.java"/>
<include name="org/**/*.xsd"/>
<include name="org/**/*.xml"/>
<include name="org/**/*.properties"/>
</fileset>
</copy>
<jar jarfile="${build.lib.dir}/${jarfile}" basedir="${build.classes.dir}"/>
</target>
<target name="deploy" depends="jar" description="deploy the library">
<if name="etics.build" value="true">
<copy file="${build.lib.dir}/${jarfile}" toDir="${lib.location}"/>
<else>
<copy file="${build.lib.dir}/${jarfile}" toDir="${container.dir}/lib"/>
</else>
</if>
</target>
<target name="undeploy" description="undeploy the library jar">
<delete file="${container.dir}/lib/${jarfile}"/>
</target>
<target name="doc" description="Generate the library javadoc">
<javadoc access="public" author="true" sourcepath="${source.dir}" packagenames="${package}.*"
destdir="doc/api" nodeprecated="false" nodeprecatedlist="false"
noindex="false" nonavbar="false" notree="false"
source="1.6"
splitindex="true"
use="true" version="true" failonerror="false">
<classpath>
<fileset dir="${lib.location}">
<include name="**/*.jar" />
<exclude name="**/${jarfile}" />
</fileset>
<fileset dir="${container.dir}/lib">
<include name="*.jar" />
</fileset>
</classpath>
</javadoc>
</target>
<target name="clean">
<delete dir="${build.dir}" quiet="true"/>
</target>
</project>

cfg/ALog.properties Normal file

@@ -0,0 +1,32 @@
#### Appenders: one logs to the console, the others write to files
log4j.rootCategory= R
#### First appender writes to console
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
#log4j.appender.stdout.layout.ConversionPattern=%m%n
#log4j.appender.stdout.File=Analysis.log
#### Second appender writes to a file
log4j.logger.AnalysisLogger=trace,stdout, R
log4j.appender.R=org.apache.log4j.RollingFileAppender
#log4j.appender.R=org.apache.log4j.AsyncAppender
#log4j.appender.R.Threshold=INFO
log4j.appender.R.File=Analysis.log
log4j.appender.R.MaxFileSize=50000KB
log4j.appender.R.MaxBackupIndex=2
log4j.appender.R.layout=org.apache.log4j.PatternLayout
log4j.appender.R.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
#log4j.appender.R.layout.ConversionPattern=%m%n
#### Third appender writes to a file
log4j.logger.org.hibernate=H
#log4j.appender.H=org.apache.log4j.RollingFileAppender
log4j.appender.H=org.apache.log4j.AsyncAppender
#log4j.appender.H.File=HibernateLog.log
#log4j.appender.H.MaxFileSize=1024KB
#log4j.appender.H.MaxBackupIndex=2
log4j.appender.H.layout=org.apache.log4j.PatternLayout
log4j.appender.H.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n

cfg/DestinationDBHibernate.cfg.xml Normal file

@@ -0,0 +1,64 @@
<?xml version='1.0' encoding='UTF-8'?>
<!--
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
-->
<hibernate-configuration>
<session-factory>
<property name="connection.driver_class">org.postgresql.Driver</property>
<property name="connection.provider_class">
org.hibernate.connection.C3P0ConnectionProvider
</property>
<!-- <property name="connection.url">jdbc:postgresql://geoserver.d4science-ii.research-infrastructures.eu/testPortingGianpaolo</property>-->
<!-- <property name="connection.url">jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu/aquamapsorgdb</property> -->
<!-- <property name="connection.url">jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated</property>-->
<property name="connection.url">jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated</property>
<property name="connection.username">utente</property>
<property name="connection.password">d4science</property>
<!-- <property name="dialect"> org.hibernate.dialect.PostgresPlusDialect</property> -->
<!-- <property name="dialect">org.hibernatespatial.postgis.PostgisDialect</property> -->
<property name="dialect">org.hibernatespatial.postgis.PostgisDialect</property>
<property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
<property name="c3p0.idleConnectionTestPeriod">3600</property> <!-- seconds -->
<property name="c3p0.automaticTestTable">connectiontesttable</property>
<!--
thread is the short name for
org.hibernate.context.ThreadLocalSessionContext and let Hibernate
bind the session automatically to the thread
-->
<!-- JDBC connection pool (use the built-in) -->
<!--<property name="connection.pool_size">1</property-->
<property name="c3p0.timeout">7200</property>
<property name="c3p0.idle_test_period">3600</property>
<property name="c3p0.max_size">10</property>
<property name="c3p0.max_statements">0</property>
<property name="c3p0.min_size">1</property>
<property name="current_session_context_class">thread</property>
<!-- Disable the second-level cache -->
<!--
<property
name="cache.provider_class">org.hibernate.cache.NoCacheProvider</property>
-->
<!--
<property name="hibernate.hbm2ddl.auto">create</property>
-->
<!-- this will show us all sql statements -->
<property name="hibernate.show_sql">false</property>
<!-- mapping files
<mapping resource="it/hcare/seat/report/mappings/HCallsTable.hbm.xml" />
<mapping resource="it/hcare/seat/report/mappings/HCallsDialogo.hbm.xml" />
<mapping resource="it/hcare/seat/report/mappings/HCallsRiconoscimenti.hbm.xml" />
-->
</session-factory>
</hibernate-configuration>
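The library's own DatabaseFactory class (not among the files shown here) consumes this configuration; as a rough sketch, loading an equivalent file with the stock Hibernate 3 API could look like the following, where the class name and the path handling are illustrative assumptions:

import java.io.File;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

public class ConnectionSketch {
    // builds a thread-safe SessionFactory from the XML above
    // (driver, URL, credentials and c3p0 pool settings)
    public static SessionFactory open(String cfgPath) {
        return new Configuration()
                .configure(new File(cfgPath + "DestinationDBHibernate.cfg.xml"))
                .buildSessionFactory();
    }
}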

cfg/algorithms.properties Normal file

@@ -0,0 +1,10 @@
AQUAMAPS_SUITABLE=org.gcube.application.aquamaps.ecomodelling.generators.spatialdistributions.AquamapsSuitable
AQUAMAPS_NATIVE=org.gcube.application.aquamaps.ecomodelling.generators.spatialdistributions.AquamapsNative
AQUAMAPS_NATIVE_2050=org.gcube.application.aquamaps.ecomodelling.generators.spatialdistributions.AquamapsNative2050
AQUAMAPS_SUITABLE_2050=org.gcube.application.aquamaps.ecomodelling.generators.spatialdistributions.AquamapsSuitable2050
REMOTE_AQUAMAPS_SUITABLE=org.gcube.application.aquamaps.ecomodelling.generators.processing.RainyCloudGenerator
REMOTE_AQUAMAPS_NATIVE=org.gcube.application.aquamaps.ecomodelling.generators.processing.RainyCloudGenerator
REMOTE_AQUAMAPS_NATIVE_2050=org.gcube.application.aquamaps.ecomodelling.generators.processing.RainyCloudGenerator
REMOTE_AQUAMAPS_SUITABLE_2050=org.gcube.application.aquamaps.ecomodelling.generators.processing.RainyCloudGenerator
DUMMY=org.gcube.application.aquamaps.ecomodelling.generators.spatialdistributions.DummyAlgorithm
TEST=org.gcube.application.aquamaps.ecomodelling.generators.spatialdistributions.TestAlgorithm
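The file above maps logical algorithm names to implementation classes; GenericConfiguration.getProperties (defined later in this commit) can load it. A minimal resolution sketch follows — the AlgorithmResolver class and the assumption that every implementation has a no-argument constructor are illustrative, not the library's actual factory (which is not part of this excerpt):

import java.util.Properties;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;

public class AlgorithmResolver {
    // resolves a logical name, e.g. "AQUAMAPS_SUITABLE", to an instance of the
    // class registered in cfg/algorithms.properties; cfgPath must end with '/'
    public static Object resolve(String cfgPath, String algorithmName) throws Exception {
        Properties props = GenericConfiguration.getProperties(cfgPath + GenericConfiguration.algorithmsFile);
        String className = props.getProperty(algorithmName);
        if (className == null)
            throw new IllegalArgumentException("unknown algorithm: " + algorithmName);
        return Class.forName(className).newInstance();
    }
}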

cfg/generators.properties Normal file

@@ -0,0 +1,3 @@
LOCAL_WITH_DATABASE=org.gcube.application.aquamaps.ecomodelling.generators.processing.LocalSplitGenerator
SIMPLE_LOCAL=org.gcube.application.aquamaps.ecomodelling.generators.processing.LocalSimpleSplitGenerator
REMOTE_RAINYCLOUD=org.gcube.application.aquamaps.ecomodelling.generators.processing.RainyCloudGenerator

cfg/models.properties Normal file

@@ -0,0 +1,2 @@
HSPEN=org.gcube.application.aquamaps.ecomodelling.generators.models.ModelHSPEN
HSPEN_MODELER=org.gcube.application.aquamaps.ecomodelling.generators.modeling.SimpleModeler

changelog.xml Normal file

@@ -0,0 +1,7 @@
<ReleaseNotes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="xsd/changelog.xsd">
<Changeset component="org.gcube.content-management.ecological-modelling.1-0-0"
date="2011-04-20">
<Change>First Release</Change>
</Changeset>
</ReleaseNotes>

ecologicalEngine.jardesc Normal file

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="WINDOWS-1252" standalone="no"?>
<jardesc>
<jar path="StatisticalLibSupportLibraries/lib/ecologicalDataMining.jar"/>
<options buildIfNeeded="true" compress="true" descriptionLocation="/EcologicalEngine/ecologicalEngine.jardesc" exportErrors="true" exportWarnings="true" includeDirectoryEntries="false" overwrite="true" saveDescription="true" storeRefactorings="false" useSourceFolders="false"/>
<storedRefactorings deprecationInfo="true" structuralOnly="false"/>
<selectedProjects/>
<manifest generateManifest="true" manifestLocation="" manifestVersion="1.0" reuseManifest="false" saveManifest="false" usesManifest="true">
<sealing sealJar="false">
<packagesToSeal/>
<packagesToUnSeal/>
</sealing>
</manifest>
<selectedElements exportClassFiles="true" exportJavaFiles="false" exportOutputFolder="false">
<javaElement handleIdentifier="=EcologicalEngine/src"/>
</selectedElements>
</jardesc>

etc/build.properties Normal file

@@ -0,0 +1,4 @@
name = EcologicalModelling
package = org.gcube.application.aquamaps.ecomodelling.generators
# Where the jar will be stored
lib.dir = Dependencies/org.gcube.application.aquamaps.ecomodelling.generators

etc/profile.xml Normal file

@@ -0,0 +1,178 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="../xsd/service.xsd">
<ID />
<Type>Library</Type>
<Profile>
<Description>Ecological Modelling Library</Description>
<Class>ContentManagement</Class>
<Name>EcologicalModelling</Name>
<Version>1.0.0</Version>
<Packages>
<Software>
<Description>Ecological Modelling Library</Description>
<Name>EcologicalModelling</Name>
<Version>1.2.0</Version>
<Dependencies>
<Dependency>
<Service>
<Class>Common</Class>
<Name>hibernate-patched</Name>
<Version>3.5.2</Version>
</Service>
<Package>hibernate-patched</Package>
<Version>3.5.2</Version>
<Scope level="GHN"/>
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>ExternalSoftware</Class>
<Name>postgresql-jdbc</Name>
<Version>8.04.00</Version>
</Service>
<Package>postgresql-jdbc</Package>
<Version>8.04.00</Version>
<Scope level="GHN" />
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>ExternalSoftware</Class>
<Name>jaxen</Name>
<Version>1.1.0</Version>
</Service>
<Package>jaxen</Package>
<Version>1.1.0</Version>
<Scope level="GHN" />
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>ExternalSoftware</Class>
<Name>jcommon</Name>
<Version>1.0.16</Version>
</Service>
<Package>jcommon</Package>
<Version>1.0.16</Version>
<Scope level="GHN" />
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>Common</Class>
<Name>TSChartDataModel</Name>
<Version>1.0.0</Version>
</Service>
<Package>TSChartDataModel</Package>
<Version>[1.0.0,2.0.0)</Version>
<Scope level="GHN" />
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>Common</Class>
<Name>rapidminer-custom</Name>
<Version>1.0.0</Version>
</Service>
<Package>rapidminer-custom</Package>
<Version>1.0.0</Version>
<Scope level="GHN" />
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>ExternalSoftware</Class>
<Name>hibernate-spatial-postgis</Name>
<Version>1.0.0</Version>
</Service>
<Package>hibernate-spatial-postgis</Package>
<Version>1.0.0</Version>
<Scope level="GHN" />
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>ExternalSoftware</Class>
<Name>hibernate-spatial</Name>
<Version>1.0.0</Version>
</Service>
<Package>hibernate-spatial</Package>
<Version>1.0.0</Version>
<Scope level="GHN" />
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>ContentManagement</Class>
<Name>LexicalMatcher</Name>
<Version>1.0.0</Version>
</Service>
<Package>LexicalMatcher</Package>
<Version>1.2.0</Version>
<Scope level="GHN" />
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>ContentManagement</Class>
<Name>StatisticsExtractor</Name>
<Version>1.0.0</Version>
</Service>
<Package>StatisticsExtractor</Package>
<Version>1.2.0</Version>
<Scope level="GHN"/>
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>ExternalSoftware</Class>
<Name>postgis</Name>
<Version>2.0.0</Version>
</Service>
<Package>postgis</Package>
<Version>2.0.0</Version>
<Scope level="GHN" />
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>ExternalSoftware</Class>
<Name>jts</Name>
<Version>1.10.0</Version>
</Service>
<Package>jts</Package>
<Version>1.10.0</Version>
<Scope level="GHN" />
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>ExternalSoftware</Class>
<Name>slf4j</Name>
<Version>1.05.00</Version>
</Service>
<Package>slf4j</Package>
<Version>1.05.00</Version>
<Scope level="GHN"/>
<Optional>false</Optional>
</Dependency>
<Dependency>
<Service>
<Class>ExternalSoftware</Class>
<Name>google-gson</Name>
<Version>1.7.1</Version>
</Service>
<Package>google-gson</Package>
<Version>1.7.1</Version>
<Scope level="GHN"/>
<Optional>false</Optional>
</Dependency>
</Dependencies>
<Files>
<File>lib/org.gcube.application.aquamaps.ecomodelling.generators.jar</File>
</Files>
</Software>
</Packages>
</Profile>
</Resource>

src/org/gcube/dataanalysis/ecoengine/configuration/ALG_PROPS.java Normal file

@@ -0,0 +1,9 @@
package org.gcube.dataanalysis.ecoengine.configuration;
public enum ALG_PROPS {
SPECIES_VS_CSQUARE_FROM_DATABASE,
SPECIES_VS_CSQUARE_REMOTE_FROM_DATABASE,
SPECIES_VS_CSQUARE,
PHENOMENON_VS_GEOINFO
}

src/org/gcube/dataanalysis/ecoengine/configuration/GENERATOR_WEIGHT.java Normal file

@@ -0,0 +1,13 @@
package org.gcube.dataanalysis.ecoengine.configuration;
public enum GENERATOR_WEIGHT {
VERY_HIGH,
HIGH,
MEDIUM_HIGH,
MEDIUM,
MEDIUM_LOW,
LOW,
LOWEST
}

src/org/gcube/dataanalysis/ecoengine/configuration/GenericConfiguration.java Normal file

@@ -0,0 +1,278 @@
package org.gcube.dataanalysis.ecoengine.configuration;
import java.io.FileInputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
public class GenericConfiguration extends LexicalEngineConfiguration{
public static Properties getProperties(String absoluteFilePath) {
Properties props = new Properties();
FileInputStream fis = null;
try {
fis = new FileInputStream(absoluteFilePath);
props.load(fis);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (fis != null) fis.close();
} catch (Exception e) {
}
}
return props;
}
// constants
public static String defaultConnectionFile = "DestinationDBHibernate.cfg.xml";
public static String defaultLoggerFile = "ALog.properties";
public static String algorithmsFile = "algorithms.properties";
public static String generatorsFile = "generators.properties";
public static String modelsFile = "models.properties";
public static int chunkSize = 100000;
public static int refreshResourcesTime = 10;
// database parameters
private String databaseDriver = "org.postgresql.Driver";
private String databaseURL = null;
private String databaseUserName = null;
private String databasePassword = null;
private String databaseDialect = null;
private String databaseIdleConnectionTestPeriod = null;
private String databaseAutomaticTestTable = null;
// Algorithm Parameters
private String configPath;
private String cachePath;
private String persistencePath;
private String distributionTable;
private Boolean createTable = false;
private Boolean useDB = true;
private String envelopeTable;
private String csquarecodesTable;
private String occurrenceCellsTable;
private List<String> featuresTable;
private List<String> preprocessedTables;
//service and remote
private String remoteCalculatorEndpoint;
private String serviceUserName;
private String remoteEnvironment;
private Integer numberOfResources;
//modeling
private String model;
private String generator;
//other properties
private HashMap<String, String> generalProperties;
public void setDatabaseDriver(String databaseDriver) {
this.databaseDriver = databaseDriver;
}
public String getDatabaseDriver() {
return databaseDriver;
}
public void setDatabaseURL(String databaseURL) {
this.databaseURL = databaseURL;
}
public String getDatabaseURL() {
return databaseURL;
}
public void setDatabaseUserName(String databaseUserName) {
this.databaseUserName = databaseUserName;
}
public String getDatabaseUserName() {
return databaseUserName;
}
public void setDatabasePassword(String databasePassword) {
this.databasePassword = databasePassword;
}
public String getDatabasePassword() {
return databasePassword;
}
public void setDatabaseDialect(String databaseDialect) {
this.databaseDialect = databaseDialect;
}
public String getDatabaseDialect() {
return databaseDialect;
}
public void setDatabaseIdleConnectionTestPeriod(String databaseIdleConnectionTestPeriod) {
this.databaseIdleConnectionTestPeriod = databaseIdleConnectionTestPeriod;
}
public String getDatabaseIdleConnectionTestPeriod() {
return databaseIdleConnectionTestPeriod;
}
public void setDatabaseAutomaticTestTable(String databaseAutomaticTestTable) {
this.databaseAutomaticTestTable = databaseAutomaticTestTable;
}
public String getDatabaseAutomaticTestTable() {
return databaseAutomaticTestTable;
}
public void setConfigPath(String configPath) {
if (!configPath.endsWith("/"))
configPath+="/";
this.configPath = configPath;
}
public String getConfigPath() {
return configPath;
}
public void setDistributionTable(String distributionTable) {
this.distributionTable = distributionTable;
}
public String getDistributionTable() {
return distributionTable;
}
public void setCreateTable(Boolean createTable) {
this.createTable = createTable;
}
public Boolean createTable() {
return createTable;
}
public void setNumberOfResources(Integer numberOfThreads) {
this.numberOfResources = numberOfThreads;
}
public Integer getNumberOfResources() {
return numberOfResources;
}
public void setUseDB(Boolean writeOnDB) {
this.useDB = writeOnDB;
}
public Boolean useDB() {
return useDB;
}
public void setRemoteCalculator(String remoteCalculator) {
this.remoteCalculatorEndpoint = remoteCalculator;
}
public String getRemoteCalculator() {
return remoteCalculatorEndpoint;
}
public void setServiceUserName(String serviceUserName) {
this.serviceUserName = serviceUserName;
}
public String getServiceUserName() {
return serviceUserName;
}
public void setCachePath(String cachePath) {
this.cachePath = cachePath;
}
public String getCachePath() {
return cachePath;
}
public void setGeneralProperties(HashMap<String, String> generalProperties) {
this.generalProperties = generalProperties;
}
public HashMap<String, String> getGeneralProperties() {
return generalProperties;
}
public void setRemoteEnvironment(String remoteEnvironment) {
this.remoteEnvironment = remoteEnvironment;
}
public String getRemoteEnvironment() {
return remoteEnvironment;
}
public String getModel() {
return model;
}
public void setModel(String model) {
this.model = model;
}
public String getEnvelopeTable() {
return envelopeTable;
}
public void setEnvelopeTable(String envelopeTable) {
this.envelopeTable = envelopeTable;
}
public String getCsquarecodesTable() {
return csquarecodesTable;
}
public void setCsquarecodesTable(String csquarecodesTable) {
this.csquarecodesTable = csquarecodesTable;
}
public List<String> getFeaturesTable() {
return featuresTable;
}
public void setFeaturesTable(List<String> featuresTable) {
this.featuresTable = featuresTable;
}
public List<String> getPreprocessedTables() {
return preprocessedTables;
}
public void setPreprocessedTables(List<String> preprocessedTables) {
this.preprocessedTables = preprocessedTables;
}
public String getOccurrenceCellsTable() {
return occurrenceCellsTable;
}
public void setOccurrenceCellsTable(String occurrenceCellsTable) {
this.occurrenceCellsTable = occurrenceCellsTable;
}
public String getPersistencePath() {
return persistencePath;
}
public void setPersistencePath(String persistencePath) {
this.persistencePath = persistencePath;
}
public String getGenerator() {
return generator;
}
public void setGenerator(String generator) {
this.generator = generator;
}
}
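A hypothetical usage sketch for this configuration bean; every concrete value below (paths, credentials, table names, thread count) is invented for illustration:

import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;

public class ConfigExample {
    public static GenericConfiguration sampleSuitableRun() {
        GenericConfiguration config = new GenericConfiguration();
        config.setConfigPath("./cfg");                // trailing '/' is added by the setter
        config.setDatabaseURL("jdbc:postgresql://localhost/aquamapsdb"); // placeholder
        config.setDatabaseUserName("user");           // placeholder
        config.setDatabasePassword("password");       // placeholder
        config.setEnvelopeTable("hspen");             // input envelopes
        config.setCsquarecodesTable("hcaf_d");        // geographical cells
        config.setOccurrenceCellsTable("occurrencecells");
        config.setDistributionTable("hspec_suitable_test"); // output table
        config.setCreateTable(true);
        config.setNumberOfResources(2);               // worker threads
        return config;
    }
}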

src/org/gcube/dataanalysis/ecoengine/connectors/RemoteGenerationManager.java Normal file

@@ -0,0 +1,71 @@
package org.gcube.dataanalysis.ecoengine.connectors;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class RemoteGenerationManager {
private final String submissionMethod = "submit";
private final String statusMethod = "status/";
private String submissionID;
private String username;
private String endpoint;
public RemoteGenerationManager(String generatorEndPoint){
if (generatorEndPoint.charAt(generatorEndPoint.length()-1)=='/')
endpoint = generatorEndPoint;
else
endpoint = generatorEndPoint+"/";
}
public void submitJob(RemoteHspecInputObject rhio) throws Exception{
AnalysisLogger.getLogger().warn("RemoteGenerationManager: retrieving job information");
RemoteHspecOutputObject rhoo = null;
username = rhio.userName;
try{
rhoo = (RemoteHspecOutputObject)HttpRequest.postJSonData(endpoint+submissionMethod, rhio, RemoteHspecOutputObject.class);
AnalysisLogger.getLogger().trace("RemoteGenerationManager: job information retrieved");
}catch(Exception e){
e.printStackTrace();
AnalysisLogger.getLogger().trace("RemoteGenerationManager: ERROR - job information NOT retrieved");
throw e;
}
if ((rhoo!=null) && (rhoo.id!=null)){
AnalysisLogger.getLogger().warn("RemoteGenerationManager: job ID retrieved ");
submissionID = rhoo.id;
}
else{
String error = (rhoo != null) ? rhoo.error : "no response from the remote generator";
AnalysisLogger.getLogger().warn("RemoteGenerationManager: ERROR - job ID NOT retrieved "+error);
throw new Exception("RemoteGenerationManager: job ID not retrieved: "+error);
}
}
public double retrieveCompletion(){
RemoteHspecOutputObject rhoo = retrieveCompleteStatus();
try{
double completion = Double.parseDouble(rhoo.completion);
return completion;
}catch(Exception e){
e.printStackTrace();
AnalysisLogger.getLogger().warn("RemoteGenerationManager: ERROR - cannot retrieve information from remote site ",e);
}
return 0;
}
public RemoteHspecOutputObject retrieveCompleteStatus(){
RemoteHspecOutputObject rhoo = null;
try{
rhoo = (RemoteHspecOutputObject)HttpRequest.getJSonData(endpoint+statusMethod+submissionID, null ,RemoteHspecOutputObject.class);
}catch(Exception e){
e.printStackTrace();
AnalysisLogger.getLogger().warn("RemoteGenerationManager: ERROR - cannot retrieve information from remote site ",e);
}
return rhoo;
}
}
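A sketch of the submit-and-poll cycle this class supports; the endpoint, user name and table names are placeholders, and the polling bound is an arbitrary choice:

import org.gcube.dataanalysis.ecoengine.connectors.RemoteGenerationManager;
import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecInputObject;
import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecOutputObject;

public class RemoteRunSketch {
    public static void main(String[] args) throws Exception {
        RemoteHspecInputObject input = new RemoteHspecInputObject();
        input.userName = "test.user";                 // placeholder
        input.generativeModel = "AQUAMAPS_SUITABLE";
        input.nWorkers = 2;
        input.hspenTableName.tableName = "hspen";     // placeholder tables
        input.hcafTableName.tableName = "hcaf_d";
        input.hspecDestinationTableName.tableName = "hspec_out";

        RemoteGenerationManager manager =
                new RemoteGenerationManager("http://example.org/generator"); // placeholder endpoint
        manager.submitJob(input);
        // poll until the remote job reports 100% completion (bounded to ~10 minutes)
        double completion = 0;
        for (int i = 0; i < 120 && completion < 100; i++) {
            Thread.sleep(5000);
            completion = manager.retrieveCompletion();
        }
        RemoteHspecOutputObject result = manager.retrieveCompleteStatus();
        System.out.println("final status: " + result.status);
    }
}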

src/org/gcube/dataanalysis/ecoengine/connectors/RemoteHspecInputObject.java Normal file

@@ -0,0 +1,32 @@
package org.gcube.dataanalysis.ecoengine.connectors;
import java.util.HashMap;
import java.util.List;
public class RemoteHspecInputObject {
public String userName;
public int nWorkers;
public String id;
public String generativeModel;
public String environment;
public List<String> speciesList;
public Table hspenTableName;
public Table hcafTableName;
public Table hspecDestinationTableName;
public Table occurrenceCellsTable;
public boolean is2050;
public boolean isNativeGeneration;
public HashMap<String,String> configuration;
public RemoteHspecInputObject() {
hspenTableName=new Table();
hcafTableName=new Table();
hspecDestinationTableName=new Table();
occurrenceCellsTable=new Table();
}
public class Table{
public String jdbcUrl;
public String tableName;
}
}

src/org/gcube/dataanalysis/ecoengine/connectors/RemoteHspecOutputObject.java Normal file

@@ -0,0 +1,40 @@
package org.gcube.dataanalysis.ecoengine.connectors;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.SingleResource;
public class RemoteHspecOutputObject {
public String id;
public String status;
public String completion;
public Metric metrics;
public String error;
// "load":[{"resId":"W1","value":51.5},{"resId":"W2","value":23.4}],"throughput":[1307977348021,16490000]}
public class Metric{
public long timestamp;
public double activityvalue;
public int processedspecies;
public Resources resources;
public List<SingleResource> load;
public List<Long> throughput;
public Metric(){
resources = new Resources();
}
public String toString(){
return timestamp+""+activityvalue+""+resources;
}
}
public String toString(){
return id+";"+status+";"+completion+";"+metrics+";"+error+";";
}
}

src/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/ResourceLoad.java Normal file

@@ -0,0 +1,17 @@
package org.gcube.dataanalysis.ecoengine.connectors.livemonitor;
public class ResourceLoad {
public long timestamp;
public double value;
public ResourceLoad (long time,double val){
timestamp = time;
value = val;
}
public String toString(){
return "["+timestamp+", "+value+"]";
}
}

src/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/Resources.java Normal file

@@ -0,0 +1,20 @@
package org.gcube.dataanalysis.ecoengine.connectors.livemonitor;
import java.util.ArrayList;
import java.util.List;
public class Resources {
public List<SingleResource> list;
public Resources(){
list = new ArrayList<SingleResource>();
}
public void addResource(String resID, double value){
list.add(new SingleResource(resID, value));
}
}

src/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/SingleResource.java Normal file

@@ -0,0 +1,12 @@
package org.gcube.dataanalysis.ecoengine.connectors.livemonitor;
public class SingleResource {
public String resId;
public double value;
public SingleResource(String resid, double val){
resId = resid;
value = val;
}
}

src/org/gcube/dataanalysis/ecoengine/interfaces/Generator.java Normal file

@@ -0,0 +1,31 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GENERATOR_WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
public interface Generator {
public ALG_PROPS[] getSupportedAlgorithms();
//gets the weight of the generator: according to this the generator will be placed in the execution order
public GENERATOR_WEIGHT getWeight();
public float getStatus();
public void init();
public void setConfiguration(GenericConfiguration config);
public void shutdown();
public void stopProcess();
public String getResourceLoad();
public String getResources();
public String getLoad();
public void generate() throws Exception;
}
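A sketch of how a Generator implementation might be driven, assuming getStatus() reports 100 when the computation ends (the convention ModelHSPEN below also follows); the driver class itself is hypothetical:

import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;

public class GeneratorDriverSketch {
    // configures the generator, runs generate() on a worker thread
    // and polls the status until completion
    public static void run(final Generator generator, GenericConfiguration config) throws Exception {
        generator.setConfiguration(config);
        generator.init();
        Thread worker = new Thread(new Runnable() {
            public void run() {
                try {
                    generator.generate();
                } catch (Exception e) {
                    e.printStackTrace(); // a real driver would surface the failure
                }
            }
        });
        worker.start();
        while (generator.getStatus() < 100) {
            System.out.println("progress: " + generator.getStatus() + "% - load: " + generator.getResourceLoad());
            Thread.sleep(1000);
        }
        generator.shutdown();
    }
}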

src/org/gcube/dataanalysis/ecoengine/interfaces/Model.java Normal file

@@ -0,0 +1,34 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
public interface Model {
public float getVersion();
public void setVersion(float version);
public String getName();
public void init(Object Input, Model previousModel, Object Destination);
public String getResourceLoad();
public String getResources();
public float getStatus();
public String getInputType();
public String getOutputType();
public void postprocess(Object Input, Model previousModel, Object Destination);
public void train(Object Input, Model previousModel, Object Destination);
// gets the type of the content inside the model: e.g. Table Model, Vectorial Model etc.
public String getContentType();
// gets the content of the model: e.g. Table indications etc.
public Object getContent();
public void stop();
}

src/org/gcube/dataanalysis/ecoengine/interfaces/Modeler.java Normal file

@@ -0,0 +1,25 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
public interface Modeler {
public void setmodel(Model model);
public void model(Object Input, Model previousModel, Object Destination);
public String getResourceLoad();
public String getResources();
public float getStatus();
public void stop();
//gets the class name of the model
public String getModelType();
// gets the content of the model: e.g. Table indications etc.
public Object getModelContent();
public Model getModel();
}

src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistribution.java Normal file

@@ -0,0 +1,14 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
public interface SpatialProbabilityDistribution {
//defines the properties of this algorithm
public ALG_PROPS[] getProperties();
//defines the name of this algorithm
public String getName();
//gets the description of the algorithm
public String getDescription();
}

src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionGeneric.java Normal file

@@ -0,0 +1,48 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.List;
import java.util.Map;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
//extends SpatialProbabilityDistribution for the case in which main and geographical information are supplied as in-memory objects rather than taken from a database
public interface SpatialProbabilityDistributionGeneric extends SpatialProbabilityDistribution{
//initialization of the distribution model
public void init(GenericConfiguration config);
public String getMainInfoType();
public String getGeographicalInfoType();
public List<Object> getMainInfoObjects();
//get the way geographical information will be taken
public List<Object> getGeographicalInfoObjects();
//calculate a single step of probability
public float calcProb(Object mainInfo,Object area);
//preprocessing before calculating a single probability value
public void singleStepPreprocess(Object mainInfo,Object area);
//postprocessing after calculating a single probability value
public void singleStepPostprocess(Object mainInfo,Object allAreasInformation);
//postprocessing after the whole calculation
public void postProcess();
//store the result of the probability distribution model: e.g. for the input species -> csquare , probability
public void storeDistribution(Map<Object,Map<Object,Float>> distribution);
//get the internal processing status for the single step calculation
public float getInternalStatus();
//get a unique identifier for the object representing the main information , e.g. speciesID representing the first element to be put in the species probability insert
public String getMainInfoID(Object mainInfo);
//get a unique identifier for the geographical information: e.g. csquarecode representing the second element to be put in the species probability insert
public String getGeographicalID(Object geoInfo);
}
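For illustration only, a trivial implementation of this interface that assigns a uniform probability to hard-wired objects; it is unrelated to the DummyAlgorithm registered in cfg/algorithms.properties, whose source is not part of this excerpt:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;

public class UniformDistribution implements SpatialProbabilityDistributionGeneric {
    public ALG_PROPS[] getProperties() { return new ALG_PROPS[] { ALG_PROPS.PHENOMENON_VS_GEOINFO }; }
    public String getName() { return "UNIFORM"; }
    public String getDescription() { return "assigns the same probability to every area"; }
    public void init(GenericConfiguration config) { }
    public String getMainInfoType() { return String.class.getName(); }
    public String getGeographicalInfoType() { return String.class.getName(); }
    public List<Object> getMainInfoObjects() { return Arrays.<Object>asList("speciesA"); }        // invented IDs
    public List<Object> getGeographicalInfoObjects() { return Arrays.<Object>asList("1000:1", "1000:2"); }
    public float calcProb(Object mainInfo, Object area) { return 0.5f; }                          // uniform value
    public void singleStepPreprocess(Object mainInfo, Object area) { }
    public void singleStepPostprocess(Object mainInfo, Object allAreasInformation) { }
    public void postProcess() { }
    public void storeDistribution(Map<Object, Map<Object, Float>> distribution) { System.out.println(distribution); }
    public float getInternalStatus() { return 100; }
    public String getMainInfoID(Object mainInfo) { return mainInfo.toString(); }
    public String getGeographicalID(Object geoInfo) { return geoInfo.toString(); }
}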

src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionTable.java Normal file

@@ -0,0 +1,62 @@
package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.Queue;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.hibernate.SessionFactory;
//extends SpatialProbabilityDistribution for the case in which data are taken from a database
public interface SpatialProbabilityDistributionTable extends SpatialProbabilityDistribution{
//define the properties of this algorithm
public ALG_PROPS[] getProperties();
//initialization of the distribution model
public void init(GenericConfiguration config,SessionFactory dbHibConnection);
//get the way principal info will be queried
public String getMainInfoQuery();
//get the way geographical information will be taken
public String getGeographicalInfoQuery();
//get the structure of the table which will contain the prob distribution
public String getDistributionTableStatement();
//calculate a single step of probability
public float calcProb(Object mainInfo,Object area);
//get additional metadata list to be put in the final table
public String getAdditionalMetaInformation();
//get the additional content to be put in the final table according to the Metadata
public String getAdditionalInformation(Object mainInfo,Object area);
//preprocessing before calculating a single probability value
public void singleStepPreprocess(Object mainInfo,Object area);
//postprocessing after calculating a single probability value
public void singleStepPostprocess(Object mainInfo,Object allAreasInformation);
//postprocessing after the whole calculation
public void postProcess();
//get the internal processing status for the single step calculation
public float getInternalStatus();
//get a unique identifier for the object representing the main information , e.g. speciesID representing the first element to be put in the species probability insert
public String getMainInfoID(Object mainInfo);
//get a unique identifier for the geographical information: e.g. csquarecode representing the second element to be put in the species probability insert
public String getGeographicalID(Object geoInfo);
//apply a filter to a single table row representing a probability point
public String filterProbabiltyRow(String probabiltyRow);
//apply a bulk filter
public Queue<String> filterProbabilitySet(Queue<String> probabiltyRows);
//indicates whether probability rows are written while each mainInfo object is being processed or only after the whole computation
public boolean isSynchronousProbabilityWrite();
}

src/org/gcube/dataanalysis/ecoengine/modeling/SimpleModeler.java Normal file

@@ -0,0 +1,59 @@
package org.gcube.dataanalysis.ecoengine.modeling;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
public class SimpleModeler implements Modeler{
private Model innermodel;
@Override
public void model(Object Input, Model previousModel, Object Destination) {
innermodel.init(Input, previousModel, Destination);
innermodel.train(Input, previousModel, Destination);
innermodel.postprocess(Input, previousModel, Destination);
}
@Override
public String getResourceLoad() {
return innermodel.getResourceLoad();
}
@Override
public String getResources() {
return innermodel.getResources();
}
@Override
public String getModelType() {
return innermodel.getContentType();
}
@Override
public Object getModelContent() {
return innermodel.getContent();
}
@Override
public Model getModel() {
return innermodel;
}
@Override
public void setmodel(Model model) {
innermodel = model;
}
@Override
public float getStatus() {
return innermodel.getStatus();
}
@Override
public void stop() {
innermodel.stop();
}
}

src/org/gcube/dataanalysis/ecoengine/models/ModelHSPEN.java Normal file

@@ -0,0 +1,408 @@
package org.gcube.dataanalysis.ecoengine.models;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.AquamapsEnvelopeAlgorithm;
import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.EnvelopeSet;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory;
public class ModelHSPEN implements Model {
private float version;
// DB SESSION
protected SessionFactory connection;
// Queries
private static final String alterQuery = "UPDATE %HSPEN% SET %1$s WHERE speciesid = '%2$s'";
private static final String dropHspenTable = "DROP TABLE %HSPEN%; ";
private static final String createHspenTable = "CREATE TABLE %HSPEN% ( speccode integer, speciesid character varying NOT NULL, lifestage character varying NOT NULL, faoareas character varying(100), faoareasref character varying, faocomplete smallint, nmostlat real, smostlat real, wmostlong real,emostlong real, lme character varying(180), depthyn smallint, depthmin integer, depthmax integer, depthprefmin integer, depthprefmax integer, meandepth smallint, depthref character varying, pelagic smallint, tempyn smallint, tempmin real, tempmax real, tempprefmin real, tempprefmax real, tempref character varying, salinityyn smallint, salinitymin real, salinitymax real, salinityprefmin real, salinityprefmax real, salinityref character varying, primprodyn smallint, primprodmin real, primprodmax real, primprodprefmin real, primprodprefmax real, primprodprefref character varying, iceconyn smallint, iceconmin real, iceconmax real, iceconprefmin real, iceconprefmax real, iceconref character varying, landdistyn smallint, landdistmin real, landdistmax real, landdistprefmin real, landdistprefmax real, landdistref character varying, remark character varying, datecreated timestamp without time zone, datemodified timestamp without time zone, expert integer, dateexpert timestamp without time zone, envelope smallint, mapdata smallint, effort smallint, layer character(1), usepoints smallint, rank smallint, CONSTRAINT %HSPEN%_pkey PRIMARY KEY (speciesid, lifestage))WITH ( OIDS=FALSE); CREATE INDEX envelope_%HSPEN%_idx ON %HSPEN% USING btree (envelope); CREATE INDEX mapdata_%HSPEN%_idx ON %HSPEN% USING btree (mapdata); CREATE INDEX speciesid_%HSPEN%_idx ON %HSPEN% USING btree (speciesid);";
private static final String populateNewHspen = "insert into %HSPEN% (select * from %HSPEN_ORIGIN%);";
private static final String speciesListQuery = "select distinct speciesid from %HSPEN%;";
private static final String hspenListQuery = "select speciesid, layer, iceconmin , iceconmax , iceconprefmin , iceconprefmax , salinitymin , salinitymax , salinityprefmin , salinityprefmax , landdistmin , landdistmax , landdistprefmin , landdistprefmax , tempmin , tempmax , tempprefmin , tempprefmax , primprodmin , primprodmax , primprodprefmin , primprodprefmax from %HSPEN%;";
// constants
String defaultDatabaseFile = "DestinationDBHibernate.cfg.xml";
String defaultLogFile = "ALog.properties";
private String dynamicAlterQuery;
private String dynamicDropTable;
private String dynamicCreateTable;
private String dynamicPopulateNewHspen;
private String dynamicSpeciesListQuery;
private String dynamicHspenInformationQuery;
private String currentHCAFTable;
private String currentOccurrenceTable;
private int numberOfthreads;
private ExecutorService executorService;
private boolean threadActivity[];
private int countDifferences;
private boolean interruptProcessing;
private float status;
private int numbOfProcessedSpecies;
HashMap<String, List<Object>> allSpeciesHspen;
private int lastProcessedRecordsNumber;
private long lastTime;
GenericConfiguration outconfig;
@Override
public float getVersion() {
return version;
}
@Override
public String getName() {
return ModelHSPEN.class.getName();
}
@Override
public void init(Object Input, Model previousModel, Object Destination) {
GenericConfiguration config = (GenericConfiguration) Input;
outconfig = (GenericConfiguration) Destination;
defaultDatabaseFile = config.getConfigPath() + defaultDatabaseFile;
AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
try {
connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, config);
} catch (Exception e) {
AnalysisLogger.getLogger().debug(e);
e.printStackTrace();
}
// initialize queries
dynamicAlterQuery = alterQuery.replace("%HSPEN%", outconfig.getEnvelopeTable());
dynamicDropTable = dropHspenTable.replace("%HSPEN%", outconfig.getEnvelopeTable());
dynamicCreateTable = createHspenTable.replace("%HSPEN%", outconfig.getEnvelopeTable());
dynamicPopulateNewHspen = populateNewHspen.replace("%HSPEN_ORIGIN%", config.getEnvelopeTable()).replace("%HSPEN%", outconfig.getEnvelopeTable());
dynamicSpeciesListQuery = speciesListQuery.replace("%HSPEN%", config.getEnvelopeTable());
dynamicHspenInformationQuery = hspenListQuery.replace("%HSPEN%", config.getEnvelopeTable());
currentHCAFTable = config.getCsquarecodesTable();
currentOccurrenceTable = config.getOccurrenceCellsTable();
// Threads
numberOfthreads = config.getNumberOfResources();
// interrupt process
interruptProcessing = false;
status = 0;
}
// retrieves the list of species identifiers from the database
private List<Object> populateSpecies() {
AnalysisLogger.getLogger().trace("Distribution Generator ->getting all species list from DB");
List<Object> allspecies = DatabaseFactory.executeSQLQuery(dynamicSpeciesListQuery, connection);
return allspecies;
}
private HashMap<String, List<Object>> populateHspen() {
HashMap<String, List<Object>> allSpeciesHspen = new HashMap<String, List<Object>>();
List<Object> SpeciesInfo = DatabaseFactory.executeSQLQuery(dynamicHspenInformationQuery, connection);
int lenSpecies = SpeciesInfo.size();
for (int i = 0; i < lenSpecies; i++) {
Object[] speciesArray = (Object[]) SpeciesInfo.get(i);
String speciesid = (String) speciesArray[0];
List<Object> singleSpeciesInfo = new ArrayList<Object>();
singleSpeciesInfo.add(speciesArray);
allSpeciesHspen.put((String) speciesid, singleSpeciesInfo);
}
return allSpeciesHspen;
}
// initializes threads activity status
public void initializeThreads(int numberOfThreadsToUse) {
// initialize threads and their activity state
executorService = Executors.newFixedThreadPool(numberOfThreadsToUse);
threadActivity = new boolean[numberOfThreadsToUse];
// initialize to false;
for (int j = 0; j < threadActivity.length; j++) {
threadActivity[j] = false;
}
}
// waits for thread to be free
private void wait4Thread(int index) {
// wait until thread is free
while (threadActivity[index]) {
try {
Thread.sleep(10);
} catch (InterruptedException e) {
}
}
}
// shutdown the connection
public void shutdownConnection() {
connection.close();
}
private void generateTable(Object Input) throws Exception {
GenericConfiguration config = (GenericConfiguration) Input;
// create and populate the novel table
if (config.createTable()){
AnalysisLogger.getLogger().trace("Distribution Generator->recreating new table " + dynamicCreateTable);
try{
DatabaseFactory.executeSQLUpdate(String.format(dynamicDropTable, config.getDatabaseUserName()), connection);
}catch(Exception e){
AnalysisLogger.getLogger().trace("Impossible to drop table - maybe not existing");
}
try{
DatabaseFactory.executeSQLUpdate(String.format(dynamicCreateTable, config.getDatabaseUserName()), connection);
}catch(Exception e){
AnalysisLogger.getLogger().trace("Impossible to create table - maybe yet existing");
}
}
AnalysisLogger.getLogger().trace("Distribution Generator->populating new table " + dynamicPopulateNewHspen);
DatabaseFactory.executeSQLUpdate(dynamicPopulateNewHspen, connection);
}
@Override
public void train(Object Input, Model previousModel, Object Destination) {
long tstart = System.currentTimeMillis();
// INITIALIZATION
try {
AnalysisLogger.getLogger().trace("ModelHSPENr->populating species");
List<Object> allspecies = populateSpecies();
allSpeciesHspen = populateHspen();
AnalysisLogger.getLogger().trace("ModelHSPENr->ENVELOPES GENERATION STARTED");
// initialize threads
initializeThreads(numberOfthreads);
// END INITIALIZATION
// generate the hspen table
generateTable(Input);
// thread selection index
int currentThread = 0;
// global chunks counter
int globalcounter = 0;
// count differences in hspen original and new hspen
countDifferences = 0;
// take time
long computationT0 = System.currentTimeMillis();
int numberOfSpecies = allspecies.size();
// ENVELOPES CALCULATION
// cycle through the species to generate
// one thread calculation for each species
for (Object species : allspecies) {
// get speciesID
String speciesid = (String) species;
if (speciesid.length() > 0) {
// calculation on multiple threads
AnalysisLogger.getLogger().trace("ModelHSPENr->ANALIZING SPECIES: " + speciesid);
// wait for thread to be free
wait4Thread(currentThread);
// start species information calculation on the thread
startNewTCalc(currentThread, speciesid);
// increment thread selection index
currentThread++;
// reset current thread index
if (currentThread >= numberOfthreads)
currentThread = 0;
// report progress status
float s = (float) ((int) (((float) globalcounter * 100f / (numberOfSpecies)) * 100f)) / 100f;
status = (s == 100) ? 99 : s;
AnalysisLogger.getLogger().trace("STATUS->" + status + "%");
// increment global counter index
globalcounter++;
AnalysisLogger.getLogger().warn("Number of Found Differences: " + countDifferences);
}
if (interruptProcessing)
break;
}
// END OF CALCULATION CORE
// wait for last threads to finish
for (int i = 0; i < numberOfthreads; i++) {
// free previous calculation
wait4Thread(i);
}
long computationT1 = System.currentTimeMillis();
AnalysisLogger.getLogger().warn("All Envelopes Computation Finished in " + (computationT1 - computationT0) + " ms");
AnalysisLogger.getLogger().warn("Number of Overall Found Differences: " + countDifferences);
} catch (Exception e) {
AnalysisLogger.getLogger().trace("Computation traminate prematurely: ", e);
} finally {
// shutdown threads
executorService.shutdown();
// shutdown connection
shutdownConnection();
// set completeness
status = 100.0f;
long tstop = System.currentTimeMillis();
AnalysisLogger.getLogger().warn("All Envelopes Computation Finished in " + (tstop - tstart) + " ms");
}
}
// THREAD SECTION
// definition of the Thread
// calculates values for one species
private class ThreadCalculator implements Callable<Integer> {
int index;
String species;
public ThreadCalculator(int index, String species) {
this.species = species;
this.index = index;
}
public Integer call() {
try {
calcEnvelopes(species);
} catch (Exception e) {
AnalysisLogger.getLogger().trace("" + e);
e.printStackTrace();
}
threadActivity[index] = false;
return 0;
}
}
// end Definition of the Thread
// activation
private void startNewTCalc(int index, String species) {
threadActivity[index] = true;
ThreadCalculator tc = new ThreadCalculator(index, species);
executorService.submit(tc);
}
// END OF THREAD SECTION
// calculation for standalone mode
public void calcEnvelopes(String species) {
// take initial time
long t0 = System.currentTimeMillis();
try {
// take information for the selected Species
List<Object> singleHspen = allSpeciesHspen.get(species);
// call all envelopes calculations
EnvelopeSet envSet = AquamapsEnvelopeAlgorithm.calculateEnvelopes(species, connection, currentOccurrenceTable, currentHCAFTable, (Object[]) singleHspen.get(0));
String instruction = envSet.getEnvelopeString();
// take the result of the calculation
long t1 = System.currentTimeMillis();
AnalysisLogger.getLogger().trace("Computation for species " + species + " finished in " + (t1 - t0) + " ms");
if (instruction.length() > 0) {
countDifferences++;
// write results on the DB
String query = String.format(dynamicAlterQuery, instruction, species);
try {
AnalysisLogger.getLogger().trace("Envelope Generated - executing query: " + query);
DatabaseFactory.executeSQLUpdate(query, connection);
} catch (Exception e) {
AnalysisLogger.getLogger().trace("could not execute update");
e.printStackTrace();
// System.exit(0);
}
}
} catch (Exception ex) {
AnalysisLogger.getLogger().trace("Computation traminated prematurely: ", ex);
}
numbOfProcessedSpecies++;
// take ending time
}
@Override
public String getContentType() {
return GenericConfiguration.class.getName();
}
@Override
public Object getContent() {
return outconfig;
}
@Override
public void setVersion(float version) {
this.version = version;
}
@Override
public void postprocess(Object Input, Model previousModel, Object Destination) {
}
@Override
public String getResourceLoad() {
String returnString = "";
try {
long tk = System.currentTimeMillis();
// double activity = Double.valueOf(processedRecordsCounter)*1000.00/Double.valueOf(tk-tstart);
double activity = Double.valueOf(numbOfProcessedSpecies - lastProcessedRecordsNumber) * 1000.00 / Double.valueOf(tk - lastTime);
lastTime = tk;
lastProcessedRecordsNumber = numbOfProcessedSpecies;
ResourceLoad rs = new ResourceLoad(tk, activity);
returnString = rs.toString();
} catch (Exception e) {
e.printStackTrace();
long tk = System.currentTimeMillis();
returnString = new ResourceLoad(tk, 0).toString();
}
return returnString;
}
@Override
//this method gets information about the threads or the machines which are running the computation
public String getResources(){
Resources res = new Resources();
try{
for (int i=0;i<numberOfthreads;i++){
try{
double value = (threadActivity[i])? 100.00:0.00;
res.addResource("Thread_"+(i+1),value);
}catch(Exception e1){}
}
}catch(Exception e){
e.printStackTrace();
}
if ((res!=null) && (res.list!=null))
return HttpRequest.toJSon(res.list).replace("resId", "resID");
else
return "";
}
@Override
public void stop() {
interruptProcessing = true;
}
@Override
public float getStatus() {
return status;
}
@Override
public String getInputType() {
return GenericConfiguration.class.getName();
}
@Override
public String getOutputType() {
return GenericConfiguration.class.getName();
}
}
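The model is designed to be driven through the Modeler interface (see SimpleModeler above); a sketch of that wiring, assuming the two GenericConfiguration objects are prepared along the lines of the earlier configuration example:

import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.modeling.SimpleModeler;
import org.gcube.dataanalysis.ecoengine.models.ModelHSPEN;

public class HspenTrainingSketch {
    // input carries the source tables and DB coordinates,
    // output names the envelope table to (re)generate
    public static void regenerate(GenericConfiguration input, GenericConfiguration output) {
        SimpleModeler modeler = new SimpleModeler();
        modeler.setmodel(new ModelHSPEN());
        // SimpleModeler.model() runs init -> train -> postprocess in sequence
        modeler.model(input, null, output);
        System.out.println("status: " + modeler.getStatus() + "%");
    }
}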

src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/AquamapsEnvelope.java Normal file

@@ -0,0 +1,182 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
public class AquamapsEnvelope {
public Double Min;
public Double PMin;
public Double Max;
public Double PMax;
public static final double $TempUpper = 30;
public static final double $TempLower = -2;
public static final double $SalinUpper = 40.2;
public static final double $SalinLower = 3.56;
public static final double $ProdUpper = 6000;
public static final double $ProdLower = 0;
public static final double $LandUpper = 4200;
public static final double $LandLower = 0;
public static final double $SalinBUpper = 40.9;
public static final double $SalinBLower = 3.5;
private float toleranceThr = 0.015f; //tolerance on relative error
public static int round(double $n){
$n = Math.round($n * 100.00)/100.00;
String $nstr = ""+$n;
int $dec_pos = $nstr.indexOf('.');
int $final = 0;
String $left_char = "";
String $right_char = "";
if ($dec_pos>0)
{
$left_char=$nstr.substring($dec_pos-1,$dec_pos);
$right_char=$nstr.substring($dec_pos+1,$dec_pos+2);
}
if ($right_char.equals("5"))
{
if (
$left_char.equals("0") ||
$left_char.equals("2") ||
$left_char.equals("4") ||
$left_char.equals("6") ||
$left_char.equals("8")
)
{
$final = (int)Math.round($n)-1;
}
else
{
$final = (int)Math.round($n);
}
}
else
{
$final = (int)Math.round($n);
}
return $final;
}
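// Behaviour sketch (illustrative values): when the tenths digit is 5, this reproduces
// PHP-style round-half-to-even on the ones digit, as used for percentile positions:
//   round(2.5)  -> 2 (tenths digit is 5 and the ones digit '2' is even, so round down)
//   round(3.5)  -> 4 (ones digit '3' is odd, so round up)
//   round(5.25) -> 5 (tenths digit is not 5, so plain Math.round applies)
// Note the check looks only at the tenths digit, so e.g. 2.55 is also rounded down to 2.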
public void calculatePercentiles(List<Object> speciesOccurrences, Double $Uppermost, Double $Lowermost){
int position = 2;
int $reccount = speciesOccurrences.size();
//compute positions of percentiles: 25th, 75th, 10th and 90th
int $Rec25 = round(25f * ($reccount + 1f) / 100f) - 1; //25
int $Rec75 = round(75f * ($reccount + 1f) / 100f) - 1; //75
int $Rec10 = 0;
int $Rec90 = 0;
if ($reccount >= 10 && $reccount <= 13)
{
$Rec10 = round(10f * ($reccount + 1f) / 100f);
$Rec90 = round(90f * ($reccount + 1f) / 100f) - 2;
}
else
{
$Rec10 = round(10f * ($reccount + 1f) / 100f) - 1;
$Rec90 = round(90f * ($reccount + 1f) / 100f) - 1;
}
//get percentiles
// $paramData->data_seek(0);
Object[] $row2 = (Object[])speciesOccurrences.get(0);
double $Min = AquamapsEnvelopeAlgorithm.getNumber($row2,position);
// $paramData->data_seek($reccount - 1);
$row2 = (Object[])speciesOccurrences.get($reccount - 1);
double $Max = AquamapsEnvelopeAlgorithm.getNumber($row2,position);
// $paramData->data_seek($Rec25);
$row2 = (Object[])speciesOccurrences.get($Rec25);
double $25 = AquamapsEnvelopeAlgorithm.getNumber($row2,position);
// $paramData->data_seek($Rec75);
$row2 = (Object[])speciesOccurrences.get($Rec75);
double $75 = AquamapsEnvelopeAlgorithm.getNumber($row2,position);
// $paramData->data_seek($Rec10);
$row2 = (Object[])speciesOccurrences.get($Rec10);
double $PMin = AquamapsEnvelopeAlgorithm.getNumber($row2,position);
// $paramData->data_seek($Rec90);
$row2 = (Object[])speciesOccurrences.get($Rec90);
double $PMax = AquamapsEnvelopeAlgorithm.getNumber($row2,position);
if (($Uppermost!= null) && ($Lowermost != null)){
//interquartile adjusting
double $InterQuartile = Math.abs($25 - $75);
double $ParaAdjMax = $75 + Double.valueOf(1.5) * $InterQuartile;
double $ParaAdjMin = $25 - Double.valueOf(1.5) * $InterQuartile;
if ($ParaAdjMax < $Uppermost && $ParaAdjMax > $Max)
{
$Max = $ParaAdjMax;
}
if ($ParaAdjMin > $Lowermost && $ParaAdjMin < $Min)
{
$Min = $ParaAdjMin;
}
}
Min = $Min;
Max = $Max;
PMin = $PMin;
PMax = $PMax;
}
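// Worked example (illustrative): for $reccount = 20 sorted occurrence values,
//   $Rec25 = round(25*21/100)-1 = round(5.25)-1  = 4
//   $Rec75 = round(75*21/100)-1 = round(15.75)-1 = 15
//   $Rec10 = round(10*21/100)-1 = round(2.1)-1   = 1
//   $Rec90 = round(90*21/100)-1 = round(18.9)-1  = 18
// so Min/Max start as the extreme observed values and PMin/PMax as the 10th/90th
// percentiles; the interquartile step can then push Min/Max out to the quartiles
// -/+ 1.5 * |25th - 75th|, but only when that exceeds the observed extremes and
// stays within the absolute $Lowermost/$Uppermost bounds.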
private static double relativeError(double realvalue,double calculatedvalue){
double absoluteError = Math.abs(realvalue-calculatedvalue);
double relativeErr = 0;
double denominator = 1;
if (realvalue!=0)
denominator = realvalue;
if (!((realvalue ==0) && (absoluteError==0)))
relativeErr = absoluteError/denominator;
// AnalysisLogger.getLogger().debug("relative error "+relativeErr+" "+realvalue+" vs "+calculatedvalue);
return Math.abs(relativeErr);
}
public boolean checkPrevious(Double prevMin,Double prevMax,Double prevPMin,Double prevPMax){
try{
return (relativeError(prevMin,Min)<toleranceThr) && (relativeError(prevMax,Max)<toleranceThr) && (relativeError(prevPMin,PMin)<toleranceThr) && (relativeError(prevPMax,PMax)<toleranceThr);
}catch(Exception e){
return false;
}
}
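// Example (illustrative numbers): with toleranceThr = 0.015, a previous tempmin of 10.0
// and a recomputed Min of 10.1 gives a relative error of |10.0-10.1|/10.0 = 0.01, so the
// envelope counts as unchanged; a recomputed Min of 10.2 gives 0.02 > 0.015 and the
// species would be flagged as different.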
public Envelope toEnvelope(EnvelopeName name){
Min = (Min==null)?null:MathFunctions.roundDecimal(Min,2);
PMin = (PMin==null)?null:MathFunctions.roundDecimal(PMin,2);
PMax = (PMax==null)?null:MathFunctions.roundDecimal(PMax,2);
Max = (Max==null)?null:MathFunctions.roundDecimal(Max,2);
Envelope env = new Envelope(""+Min, ""+PMin,""+PMax, ""+Max);
env.setName(name);
return env;
}
}

View File

@ -0,0 +1,236 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
import java.util.ArrayList;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory;
public class AquamapsEnvelopeAlgorithm {
private static final String selectValues = "SELECT DISTINCT %OCCURRENCEPOINTS%.CsquareCode, %OCCURRENCEPOINTS%.SpeciesID, %HCAF%.%1$s FROM %OCCURRENCEPOINTS% INNER JOIN %HCAF% ON %OCCURRENCEPOINTS%.CsquareCode = %HCAF%.CsquareCode WHERE %OCCURRENCEPOINTS%.SpeciesID = '%2$s' AND %HCAF%.%1$s <> -9999 AND %HCAF%.%1$s is not null AND %HCAF%.OceanArea > 0 AND %OCCURRENCEPOINTS%.goodcell = '1' ORDER BY %HCAF%.%1$s";
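// Illustrative instantiation (hypothetical table and species names): with
// %OCCURRENCEPOINTS% -> occurrencecells, %HCAF% -> hcaf, field "SSTAnMean" and
// species "Fis-22747", the template above expands to:
//   SELECT DISTINCT occurrencecells.CsquareCode, occurrencecells.SpeciesID, hcaf.SSTAnMean
//   FROM occurrencecells INNER JOIN hcaf ON occurrencecells.CsquareCode = hcaf.CsquareCode
//   WHERE occurrencecells.SpeciesID = 'Fis-22747' AND hcaf.SSTAnMean <> -9999
//   AND hcaf.SSTAnMean is not null AND hcaf.OceanArea > 0
//   AND occurrencecells.goodcell = '1' ORDER BY hcaf.SSTAnMean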
//gets the initialization value for a string object
public static String getElement(Object[] featuresVector,int index){
if (featuresVector[index] != null) return ""+featuresVector[index];
else return null;
}
//gets the initialization value for a numeric object
public static double getNumber(Object[] featuresVector,int index){
double number = -9999;
try{
number = ((Number)featuresVector[index]).doubleValue();
}catch(Exception e){}
return number;
}
public AquamapsEnvelopeAlgorithm(){
}
//calculate envelopes on feature sets
public static EnvelopeSet calculateEnvelopes(String species, Object[] singleSpeciesValues, OccurrencePointSets occurrencePointsList){
List<Object> tempvalues = new ArrayList<Object>();
List<Object> salinityvalues = new ArrayList<Object>();
List<Object> primprodvalues = new ArrayList<Object>();
List<Object> icevalues = new ArrayList<Object>();
List<Object> landdistvalues = new ArrayList<Object>();
List<OccurrencePoint> list = occurrencePointsList.getOccurrenceMap().get(""+EnvelopeName.TEMPERATURE);
for (OccurrencePoint op:list){
tempvalues.add(op.toObjectArray());
}
list = occurrencePointsList.getOccurrenceMap().get(""+EnvelopeName.SALINITY);
for (OccurrencePoint op:list){
salinityvalues.add(op.toObjectArray());
}
list = occurrencePointsList.getOccurrenceMap().get(""+EnvelopeName.PRIMARY_PRODUCTION);
for (OccurrencePoint op:list){
primprodvalues.add(op.toObjectArray());
}
list = occurrencePointsList.getOccurrenceMap().get(""+EnvelopeName.ICE_CONCENTRATION);
for (OccurrencePoint op:list){
icevalues.add(op.toObjectArray());
}
list = occurrencePointsList.getOccurrenceMap().get(""+EnvelopeName.LAND_DISTANCE);
for (OccurrencePoint op:list){
landdistvalues.add(op.toObjectArray());
}
//build up envelope set
EnvelopeSet envSet = calcEnv(species,singleSpeciesValues,tempvalues,salinityvalues,primprodvalues,icevalues,landdistvalues);
return envSet;
}
//the core of the procedure
public static EnvelopeSet calcEnv(String species, Object[] singleSpeciesValues, List<Object> tempvalues,List<Object> salinityvalues,List<Object> primprodvalues,List<Object> icevalues,List<Object> landdistvalues){
if (tempvalues.size()<10){
AnalysisLogger.getLogger().warn("WARNING: NOT ENOUGH OCCURRENCES FOR SPECIES: "+species);
AnalysisLogger.getLogger().warn("Leaving the hspen as is");
return new EnvelopeSet();
}
//take previousValues
Double prevIceMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,2);
Double prevIceMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,3);
Double prevIcePMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,4);
Double prevIcePMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,5);
Double prevSalinityMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,6);
Double prevSalinityMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,7);
Double prevSalinityPMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,8);
Double prevSalinityPMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,9);
Double prevLanddistMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,10);
Double prevLanddistMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,11);
Double prevLanddistPMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,12);
Double prevLanddistPMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,13);
Double prevTempMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,14);
Double prevTempMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,15);
Double prevTempPMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,16);
Double prevTempPMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,17);
Double prevPrimProdMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,18);
Double prevPrimProdMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,19);
Double prevPrimProdPMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,20);
Double prevPrimProdPMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,21);
//previous values taken
String layer = getElement(singleSpeciesValues,1);
SpEnv_temp tempEnv = new SpEnv_temp();
tempEnv.calcEnvelope(layer, tempvalues);
SpEnv_salinity salinityEnv = new SpEnv_salinity();
salinityEnv.calcEnvelope(salinityvalues,layer);
SpEnv_primprod primprodEnv = new SpEnv_primprod();
primprodEnv.calcEnvelope(primprodvalues);
SpEnv_seaice seaiceEnv = new SpEnv_seaice();
seaiceEnv.calcEnvelope(icevalues);
SpEnv_landdist landdistEnv = new SpEnv_landdist();
landdistEnv.calcEnvelope(landdistvalues);
String addingElements = "";
int countchunks = 0;
if (!tempEnv.checkPrevious(prevTempMin,prevTempMax,prevTempPMin,prevTempPMax))
{
AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevTempMin+","+prevTempPMin+","+prevTempPMax+","+prevTempMax+" vs "+tempEnv.toString());
addingElements+=tempEnv.toString();
countchunks++;
}
if (!salinityEnv.checkPrevious(prevSalinityMin,prevSalinityMax,prevSalinityPMin,prevSalinityPMax))
{
AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevSalinityMin+","+prevSalinityPMin+","+prevSalinityPMax+","+prevSalinityMax+" vs "+salinityEnv.toString());
if (countchunks>0)
addingElements+=",";
addingElements+=salinityEnv.toString();
countchunks++;
}
if (!primprodEnv.checkPrevious(prevPrimProdMin,prevPrimProdMax,prevPrimProdPMin,prevPrimProdPMax))
{
AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevPrimProdMin+","+prevPrimProdPMin+","+prevPrimProdPMax+","+prevPrimProdMax+" vs "+primprodEnv.toString());
if (countchunks>0)
addingElements+=",";
addingElements+=primprodEnv.toString();
countchunks++;
}
if (!seaiceEnv.checkPrevious(prevIceMin,prevIceMax,prevIcePMin,prevIcePMax))
{
AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevIceMin+","+prevIcePMin+","+prevIcePMax+","+prevIceMax+" vs "+seaiceEnv.toString());
if (countchunks>0)
addingElements+=",";
addingElements+=seaiceEnv.toString();
countchunks++;
}
if (!landdistEnv.checkPrevious(prevLanddistMin,prevLanddistMax,prevLanddistPMin,prevLanddistPMax))
{
AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevLanddistMin+","+prevLanddistPMin+","+prevLanddistPMax+","+prevLanddistPMax+" vs "+landdistEnv.toString());
if (countchunks>0)
addingElements+=",";
addingElements+=landdistEnv.toString();
countchunks++;
}
//build up envelope set
EnvelopeSet envSet = new EnvelopeSet();
envSet.addEnvelope(tempEnv.toEnvelope(EnvelopeName.TEMPERATURE));
envSet.addEnvelope(salinityEnv.toEnvelope(EnvelopeName.SALINITY));
envSet.addEnvelope(primprodEnv.toEnvelope(EnvelopeName.PRIMARY_PRODUCTION));
envSet.addEnvelope(seaiceEnv.toEnvelope(EnvelopeName.ICE_CONCENTRATION));
envSet.addEnvelope(landdistEnv.toEnvelope(EnvelopeName.LAND_DISTANCE));
envSet.setEnvelopeString(addingElements);
return envSet;
}
public static EnvelopeSet calculateEnvelopes(String species, SessionFactory vreConnection, String occurrencePointsTable, String HcafTable, Object[] singleSpeciesValues){
String dynamicSelectValues = selectValues.replace("%OCCURRENCEPOINTS%", occurrencePointsTable).replace("%HCAF%", HcafTable);
String layer = getElement(singleSpeciesValues,1);
String TemperatureField = "SSTAnMean";
String SalinityField = "SalinityMean";
String PrimProdField = "PrimProdMean";
String IceField = "IceConAnn";
String LanddistField = "LandDist";
if ((layer != null)&&(layer.equals("b"))){
TemperatureField = "SBTAnMean";
SalinityField = "SalinityBMean";
}
String TemperatureQuery = String.format(dynamicSelectValues,TemperatureField,species);
String SalinityQuery = String.format(dynamicSelectValues,SalinityField,species);
String PrimProdQuery = String.format(dynamicSelectValues,PrimProdField,species);
String IceQuery = String.format(dynamicSelectValues,IceField,species);
String LanddistQuery = String.format(dynamicSelectValues,LanddistField,species);
List<Object> tempvalues = DatabaseFactory.executeSQLQuery(TemperatureQuery, vreConnection);
List<Object> salinityvalues = DatabaseFactory.executeSQLQuery(SalinityQuery, vreConnection);
List<Object> primprodvalues = DatabaseFactory.executeSQLQuery(PrimProdQuery, vreConnection);
List<Object> icevalues = DatabaseFactory.executeSQLQuery(IceQuery, vreConnection);
List<Object> landdistvalues = DatabaseFactory.executeSQLQuery(LanddistQuery, vreConnection);
//build up envelope set
EnvelopeSet envSet = calcEnv(species,singleSpeciesValues,tempvalues,salinityvalues,primprodvalues,icevalues,landdistvalues);
return envSet;
}
public Object[] hspen2ObjectArray(Hspen hspen) {
//convert hspen to object array
Object [] singleHspen = new Object[22];
singleHspen[0] = hspen.getSpeciesID();singleHspen[1] = hspen.getLayer();
singleHspen[2] = hspen.getIceConcentration().getMin();singleHspen[3] = hspen.getIceConcentration().getMax();
singleHspen[4] = hspen.getIceConcentration().getPrefmin();singleHspen[5] = hspen.getIceConcentration().getPrefmax();
singleHspen[6] = hspen.getSalinity().getMin();singleHspen[7] = hspen.getSalinity().getMax();
singleHspen[8] = hspen.getSalinity().getPrefmin();singleHspen[9] = hspen.getSalinity().getPrefmax();
singleHspen[10] = hspen.getLandDistance().getMin();singleHspen[11] = hspen.getLandDistance().getMax();
singleHspen[12] = hspen.getLandDistance().getPrefmin();singleHspen[13] = hspen.getLandDistance().getPrefmax();
singleHspen[14] = hspen.getTemperature().getMin();singleHspen[15] = hspen.getTemperature().getMax();
singleHspen[16] = hspen.getTemperature().getPrefmin();singleHspen[17] = hspen.getTemperature().getPrefmax();
singleHspen[18] = hspen.getPrimaryProduction().getMin();singleHspen[19] = hspen.getPrimaryProduction().getMax();
singleHspen[20] = hspen.getPrimaryProduction().getPrefmin();singleHspen[21] = hspen.getPrimaryProduction().getPrefmax();
return singleHspen;
}
}

View File

@ -0,0 +1,64 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
public class Coordinates {
private String name;
private String NMostLat;
private String SMostLat;
private String WMostLong;
private String EMostLong;
private String maxCenterLat;
private String minCenterLat;
public Coordinates(String nmostLat,String smostLat,String wmostLong,String emostLong,String maxCenterLat,String minCenterLat){
NMostLat = nmostLat;
SMostLat = smostLat;
WMostLong = wmostLong;
EMostLong = emostLong;
this.maxCenterLat = maxCenterLat;
this.minCenterLat = minCenterLat;
}
public void setNMostLat(String nMostLat) {
NMostLat = nMostLat;
}
public String getNMostLat() {
return NMostLat;
}
public void setSMostLat(String sMostLat) {
SMostLat = sMostLat;
}
public String getSMostLat() {
return SMostLat;
}
public void setWMostLong(String wMostLong) {
WMostLong = wMostLong;
}
public String getWMostLong() {
return WMostLong;
}
public void setEMostLong(String eMostLong) {
EMostLong = eMostLong;
}
public String getEMostLong() {
return EMostLong;
}
public void setMaxCenterLat(String maxCenterLat) {
this.maxCenterLat = maxCenterLat;
}
public String getMaxCenterLat() {
return maxCenterLat;
}
public void setMinCenterLat(String minCenterLat) {
this.minCenterLat = minCenterLat;
}
public String getMinCenterLat() {
return minCenterLat;
}
public void setName(String name) {
this.name = name;
}
public String getName() {
return name;
}
}

View File

@ -0,0 +1,56 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
public class Envelope {
private EnvelopeName name;
private String min;
private String prefmin;
private String prefmax;
private String max;
public Envelope(){
}
public Envelope(String min,String prefMin,String prefMax,String max){
this.min = min;
this.prefmin = prefMin;
this.prefmax = prefMax;
this.max = max;
}
public void setMin(String min) {
this.min = min;
}
public String getMin() {
return min;
}
public void setPrefmin(String prefmin) {
this.prefmin = prefmin;
}
public String getPrefmin() {
return prefmin;
}
public void setPrefmax(String prefmax) {
this.prefmax = prefmax;
}
public String getPrefmax() {
return prefmax;
}
public void setMax(String max) {
this.max = max;
}
public String getMax() {
return max;
}
public void setName(EnvelopeName name) {
this.name = name;
}
public EnvelopeName getName() {
return name;
}
}

View File

@ -0,0 +1,7 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
public enum EnvelopeModel {
AQUAMAPS,
}

View File

@ -0,0 +1,10 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
public enum EnvelopeName {
TEMPERATURE,
SALINITY,
PRIMARY_PRODUCTION,
ICE_CONCENTRATION,
LAND_DISTANCE
}

View File

@ -0,0 +1,37 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
import java.util.ArrayList;
import java.util.List;
public class EnvelopeSet {
private List<Envelope> envelopes;
private String envelopeString;
public EnvelopeSet(){
envelopes = new ArrayList<Envelope>();
envelopeString = "";
}
public void setEnvelopeString(String envelopeString) {
this.envelopeString = envelopeString;
}
public String getEnvelopeString() {
return envelopeString;
}
public void setEnvelopes(List<Envelope> envelopes) {
this.envelopes = envelopes;
}
public List<Envelope> getEnvelopes() {
return envelopes;
}
public void addEnvelope(Envelope e){
envelopes.add(e);
}
}

View File

@ -0,0 +1,127 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
public class Hspen {
private String speciesID;
private Envelope depth;
private String meanDepth;
private boolean pelagic;
private String layer;
private Envelope temperature;
private Envelope salinity;
private Envelope primaryProduction;
private Envelope iceConcentration;
private boolean landDistanceYN;
private Envelope landDistance;
private Coordinates coordinates;
private String faoAreas;
public void setDepth(Envelope depth) {
this.depth = depth;
}
public Envelope getDepth() {
return depth;
}
public void setMeanDepth(String meanDepth) {
this.meanDepth = meanDepth;
}
public String getMeanDepth() {
return meanDepth;
}
public void setPelagic(boolean pelagic) {
this.pelagic = pelagic;
}
public boolean isPelagic() {
return pelagic;
}
public void setLayer(String layer) {
this.layer = layer;
}
public String getLayer() {
return layer;
}
public void setTemperature(Envelope temperature) {
this.temperature = temperature;
}
public Envelope getTemperature() {
return temperature;
}
public void setSalinity(Envelope salinity) {
this.salinity = salinity;
}
public Envelope getSalinity() {
return salinity;
}
public void setPrimaryProduction(Envelope primaryProduction) {
this.primaryProduction = primaryProduction;
}
public Envelope getPrimaryProduction() {
return primaryProduction;
}
public void setIceConcentration(Envelope iceConcentration) {
this.iceConcentration = iceConcentration;
}
public Envelope getIceConcentration() {
return iceConcentration;
}
public void setLandDistanceYN(boolean landDistanceYN) {
this.landDistanceYN = landDistanceYN;
}
public boolean isLandDistanceYN() {
return landDistanceYN;
}
public void setLandDistance(Envelope landDistance) {
this.landDistance = landDistance;
}
public Envelope getLandDistance() {
return landDistance;
}
public void setCoordinates(Coordinates coordinates) {
this.coordinates = coordinates;
}
public Coordinates getCoordinates() {
return coordinates;
}
public void setFaoAreas(String faoAreas) {
this.faoAreas = faoAreas;
}
public String getFaoAreas() {
return faoAreas;
}
public Object[] toObjectArray(){
Object[] array = new Object[33];
array[0] = depth.getMin();array[1] = meanDepth; array[2] = depth.getPrefmin();
array[3] = (pelagic)?1:0;
array[4] = depth.getPrefmax(); array[5] = depth.getMax();
array[6] = temperature.getMin();
array[7] = layer;
array[8] = temperature.getPrefmin();array[9] = temperature.getPrefmax();array[10] = temperature.getMax();
array[11] = salinity.getMin();array[12] = salinity.getPrefmin();array[13] = salinity.getPrefmax();array[14] = salinity.getMax();
array[15] = primaryProduction.getMin();array[16] = primaryProduction.getPrefmin();array[17] = primaryProduction.getPrefmax();array[18] = primaryProduction.getMax();
array[19] = iceConcentration.getMin();array[20] = iceConcentration.getPrefmin();array[21] = iceConcentration.getPrefmax();array[22] = iceConcentration.getMax();
array[23] = (landDistanceYN)?1:0;
array[24] = landDistance.getMin();array[25] = landDistance.getPrefmin();array[26] = landDistance.getPrefmax();array[27] = landDistance.getMax();
array[28] = coordinates.getNMostLat();array[29] = coordinates.getSMostLat();array[30] = coordinates.getWMostLong();array[31] = coordinates.getEMostLong();
array[32] = faoAreas;
return array;
}
public Object[] latitudeExtent(){
Object[] array = new Object[2];
array[0] = coordinates.getMaxCenterLat();
array[1] = coordinates.getMinCenterLat();
return array;
}
public void setSpeciesID(String speciesID) {
this.speciesID = speciesID;
}
public String getSpeciesID() {
return speciesID;
}
}

View File

@ -0,0 +1,54 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
public class OccurrencePoint {
private String speciesID;
private String cSquareCode;
private Double value;
public OccurrencePoint(String speciesID,String cSquareCode,Double value){
this.speciesID=speciesID;
this.cSquareCode= cSquareCode;
this.value = value;
}
public OccurrencePoint(String cSquareCode, Double value){
this.cSquareCode= cSquareCode;
this.value = value;
}
public OccurrencePoint(Double value){
this.speciesID="";
this.cSquareCode= "";
this.value = value;
}
public void setSpeciesID(String speciesID) {
this.speciesID = speciesID;
}
public String getSpeciesID() {
return speciesID;
}
public void setCsquareCode(String csquareCode) {
this.cSquareCode = csquareCode;
}
public String getCsquareCode() {
return cSquareCode;
}
public void setValue(Double value) {
this.value = value;
}
public Double getValue() {
return value;
}
public Object[] toObjectArray(){
Object[] array = new Object[3];
array [0] = cSquareCode;
array [1] = speciesID;
array [2] = value;
return array;
}
}

View File

@ -0,0 +1,41 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class OccurrencePointSets {
private Map<String,List<OccurrencePoint>> occurrenceMap;
public OccurrencePointSets(){
occurrenceMap = new HashMap<String, List<OccurrencePoint>>();
}
public void setOccurrenceMap(Map<String,List<OccurrencePoint>> occurrenceMap) {
this.occurrenceMap = occurrenceMap;
}
public Map<String,List<OccurrencePoint>> getOccurrenceMap() {
return occurrenceMap;
}
public void addOccurrencePointList(String name,List<OccurrencePoint> pointsList){
occurrenceMap.put(name, pointsList);
}
public void addOccurrencePointList(EnvelopeModel name,List<OccurrencePoint> pointsList){
occurrenceMap.put(""+name, pointsList);
}
public void addOccurrencePoint(String name,OccurrencePoint occurrencePoint){
List<OccurrencePoint> occurrenceList = occurrenceMap.get(name);
occurrenceList.add(occurrencePoint);
}
public void addOccurrencePoint(EnvelopeModel name,OccurrencePoint occurrencePoint){
List<OccurrencePoint> occurrenceList = occurrenceMap.get(""+name);
occurrenceList.add(occurrencePoint);
}
}

View File

@ -0,0 +1,64 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
import java.util.List;
public class SpEnv_landdist extends AquamapsEnvelope {
/*
$strSQL="SELECT DISTINCT $oc_var.CsquareCode, $oc_var.SpeciesID, HCAF.LandDist
FROM $oc_var INNER JOIN HCAF ON $oc_var.CsquareCode = HCAF.CsquareCode
WHERE $oc_var.SpeciesID = '" . $row['SpeciesID'] . "'
AND HCAF.LandDist <> -9999
AND HCAF.LandDist is not null
AND HCAF.OceanArea > 0
AND $oc_var.inc = 'y'
ORDER BY HCAF.LandDist";
*/
public void calcEnvelope(List<Object> speciesOccurrences){
calculatePercentiles(speciesOccurrences, $LandUpper, $LandLower);
//check if envelope is as broad as pre-defined minimum
if (PMax - PMin < 2)
{
double $ParaMid = (PMin + PMax) / Double.valueOf(2);
double $PMinTmp = $ParaMid - 1;
double $PMaxTmp = $ParaMid + 1;
//enforce a minimum preferred range as long as it doesn't extrapolate outer limits
if ($PMinTmp < Min) {//preferred Min value as is
}
else {PMin = $PMinTmp;}
if ($PMaxTmp > Max) { //preferred Max value as is
}
else {PMax = $PMaxTmp;}
}
//check difference between min/max and pref. min/max
if (PMin - Min < 1)
{
double $MinTmp = PMin - 1;
if ($MinTmp > $LandLower) {Min = $MinTmp;}
else {Min = $LandLower;}
}
if (Max - PMax < 1)
{
double $MaxTmp = PMax + 1;
if ($MaxTmp < $LandUpper) {Max = $MaxTmp;}
else {Max = $LandUpper;}
}
}
public String toString(){
String exitString = "landdistmin='"+Min+"'," +
"landdistprefmin='"+PMin+"'," +
"landdistprefmax='"+PMax+"'," +
"landdistmax='"+Max+"'";
return exitString;
}
}

View File

@ -0,0 +1,65 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
import java.util.List;
public class SpEnv_primprod extends AquamapsEnvelope{
/*
$strSQL="SELECT DISTINCT $oc_var.CsquareCode, $oc_var.SpeciesID, HCAF.PrimProdMean
FROM $oc_var INNER JOIN HCAF ON $oc_var.CsquareCode = HCAF.CsquareCode
WHERE $oc_var.SpeciesID = '" . $row['SpeciesID'] . "'
AND HCAF.PrimProdMean is not null
AND HCAF.OceanArea > 0
AND $oc_var.inc = 'y'
ORDER BY HCAF.PrimProdMean";
*/
public void calcEnvelope(List<Object> speciesOccurrences){
calculatePercentiles(speciesOccurrences, $ProdUpper, $ProdLower);
//check if envelope is as broad as pre-defined minimum
if (PMax - PMin < 2)
{
double $ParaMid = (PMin + PMax) / Double.valueOf(2);
double $PMinTmp = $ParaMid - 1;
double $PMaxTmp = $ParaMid + 1;
//enforce a minimum preferred range as long as it doesn't extrapolate outer limits
if ($PMinTmp < Min) {//preferred Min value as is
}
else {PMin = $PMinTmp;}
if ($PMaxTmp > Max) {//preferred Max value as is
}
else {PMax = $PMaxTmp;}
}
//check difference between min/max and pref. min/max
if (PMin - Min < 1)
{
double $MinTmp = PMin - 1;
if ($MinTmp > $ProdLower) {Min = $MinTmp;}
else {Min = $ProdLower;}
}
if (Max - PMax < 1)
{
double $MaxTmp = PMax + 1;
if ($MaxTmp < $ProdUpper) {Max = $MaxTmp;}
else {Max = $ProdUpper;}
}
}
public String toString(){
String exitString = "primprodmin='"+Min+"'," +
"primprodprefmin='"+PMin+"'," +
"primprodprefmax='"+PMax+"'," +
"primprodmax='"+Max+"'";
return exitString;
}
}

View File

@ -0,0 +1,85 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
import java.util.List;
public class SpEnv_salinity extends AquamapsEnvelope{
/*
$strSQL="SELECT DISTINCT $oc_var.CsquareCode, $oc_var.SpeciesID, HCAF.$fld
FROM $oc_var INNER JOIN HCAF ON $oc_var.CsquareCode = HCAF.CsquareCode
WHERE $oc_var.SpeciesID = '" . $row['SpeciesID'] . "'
AND HCAF.$fld <> -9999
AND HCAF.$fld is not null
AND HCAF.OceanArea > 0
AND $oc_var.inc = 'y'
ORDER BY HCAF.$fld";
*/
public void calcEnvelope(List<Object> speciesOccurrences, String $layer){
double $SalinUp;
double $SalinLow;
if ($layer.equals("s"))
{
$SalinUp = $SalinUpper;
$SalinLow = $SalinLower;
}
else if ($layer.equals("b"))
{
$SalinUp = $SalinBUpper; //reset absolute min and max for bottom
$SalinLow = $SalinBLower;
}
else
{
$SalinUp = $SalinUpper;
$SalinLow = $SalinLower;
}
calculatePercentiles(speciesOccurrences, $SalinUp, $SalinLow);
//check if envelope is as broad as pre-defined minimum
if (PMax - PMin < 1)
{
double $ParaMid = (PMin + PMax) / Double.valueOf(2);
double $PMinTmp = $ParaMid - 0.5;
double $PMaxTmp = $ParaMid + 0.5;
//enforce a minimum preferred range as long as it doesn't extrapolate outer limits
if ($PMinTmp < Min) {
// preferred Min value as is
}
else {PMin = $PMinTmp;}
if ($PMaxTmp > Max) {//preferred Max value as is
}
else {PMax = $PMaxTmp;}
}
//check difference between min/max and pref. min/max
if (PMin - Min < 0.5)
{
double $MinTmp = PMin - 0.5;
if ($MinTmp > $SalinLower) {Min = $MinTmp;}
else {Min = $SalinLower;}
}
if (Max - PMax < 0.5)
{
double $MaxTmp = PMax + 0.5;
if ($MaxTmp < $SalinUpper) {Max = $MaxTmp;}
else {Max = $SalinUpper;}
}
}
public String toString(){
String exitString = "salinitymin='"+Min+"'," +
"salinityprefmin='"+PMin+"'," +
"salinityprefmax='"+PMax+"'," +
"salinitymax='"+Max+"'";
return exitString;
}
}

View File

@ -0,0 +1,63 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
import java.util.List;
public class SpEnv_seaice extends AquamapsEnvelope{
/*
$strSQL="SELECT DISTINCT $oc_var.CsquareCode, $oc_var.SpeciesID, HCAF.IceConAnn
FROM $oc_var INNER JOIN HCAF ON $oc_var.CsquareCode = HCAF.CsquareCode
WHERE $oc_var.SpeciesID = '" . $row['SpeciesID'] . "'
AND HCAF.IceConAnn is not null
AND HCAF.OceanArea > 0
AND $oc_var.inc = 'y'
ORDER BY HCAF.IceConAnn";
*/
//###################################################################################
//This file re-computes the sea ice concentration values (Min, PrefMin, Max, PrefMax)
//based on area restriction parameters set by the user
//###################################################################################
public void calcEnvelope(List<Object> speciesOccurrences){
calculatePercentiles(speciesOccurrences, null, null);
//per KK and JR: extend IceMin - avoid exclusion of species from all non-ice covered areas
double $adjVal = -1; double $sumIce = 0; double $meanIce = 0;
//fix to -1 per KK (Me!AdjustIce value taken from form input)
//Mods May 2010: treat values <.01 as zero; per KK; revised during comparison with D4S2 Proj
if (Min < 0.01)
Min = 0.00;
if (Min == 0)
{
// $paramData = $conn->query($strSQL);
$sumIce = 0;
int $reccount = speciesOccurrences.size();
for (int i=0 ; i< $reccount ;i++){
Object[] $row = (Object[])speciesOccurrences.get(i);
double $IceConn = AquamapsEnvelopeAlgorithm.getNumber($row,2);
//ice concentration
$sumIce = $sumIce + $IceConn;
}
if($reccount != 0) {$meanIce = Double.valueOf($sumIce) / Double.valueOf($reccount);}
else {$meanIce = 0;}
Min = $adjVal + $meanIce;
}
}
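// Worked example (illustrative): if the percentile step returns Min = 0.004, it is
// first zeroed (values < 0.01 are treated as 0); then, with occurrence ice
// concentrations averaging $meanIce = 0.1, Min becomes -1 + 0.1 = -0.9. The negative
// lower bound keeps the envelope from excluding all ice-free areas.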
public String toString(){
String exitString = "iceconmin='"+Min+"'," +
"iceconprefmin='"+PMin+"'," +
"iceconprefmax='"+PMax+"'," +
"iceconmax='"+Max+"'";
return exitString;
}
}

View File

@ -0,0 +1,89 @@
package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps;
import java.util.List;
public class SpEnv_temp extends AquamapsEnvelope {
/*
$strSQL="SELECT DISTINCT speciesoccursum.CsquareCode, speciesoccursum.SpeciesID, HCAF.$fld
FROM speciesoccursum INNER JOIN HCAF ON speciesoccursum.CsquareCode = HCAF.CsquareCode
WHERE speciesoccursum.SpeciesID = ' .. '
AND HCAF.$fld <> -9999
AND HCAF.$fld is not null
AND HCAF.OceanArea > 0
AND speciesoccursum.inc = 'y'
ORDER BY HCAF.$fld";
*/
//###################################################################################
//This file re-computes the temperature values (Min, PrefMin, Max, PrefMax) based on
//area restriction parameters set by the user
//###################################################################################
public void calcEnvelope(String $layer, List<Object> speciesOccurrences){
calculatePercentiles(speciesOccurrences, $TempUpper, $TempLower);
double $spreadVal = 0;
if (Max <= 5) //then polar and deepwater species
{ $spreadVal = 0.25; }
else { $spreadVal = 1; }
if ((PMax - PMin) < $spreadVal)
{
double $ParaMid = (PMin + PMax) / 2f;
double $PMinTmp = $ParaMid - ($spreadVal / 2f);
double $PMaxTmp = $ParaMid + ($spreadVal / 2f);
//enforce a minimum preferred range as long as it doesn't extrapolate outer limits
if ($PMinTmp < Min)
{
//preferred Min value as is
}
else
{
PMin = $PMinTmp;
}
if ($PMaxTmp > Max)
{
//preferred Max value as is
}
else
{
PMax = $PMaxTmp;
}
}
//check difference between min/max and pref. min/max
if (PMin - Min < 0.5)
{
double $MinTmp = PMin - 0.5;
if ($MinTmp > $TempLower){Min = $MinTmp;}
else {Min = $TempLower;}
}
if (Max - PMax < 0.5)
{
double $MaxTmp = PMax + 0.5;
if ($MaxTmp < $TempUpper){Max = $MaxTmp;}
else {Max = $TempUpper;}
}
//check if envelope is as broad as pre-defined minimum
if (PMax >= 25)
{
Max = PMax + 4.2;
}
}
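// Worked example (illustrative): suppose the percentiles yield Min = 8, PMin = 10,
// PMax = 10.3, Max = 12. Since Max > 5, $spreadVal = 1, and PMax - PMin = 0.3 < 1, so
// the preferred range is re-centred on 10.15 and widened to PMin = 9.65, PMax = 10.65
// (both inside [Min, Max], so they are accepted). The min/max margins are then already
// >= 0.5 and PMax < 25, so no further adjustment applies.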
public String toString(){
String exitString = "tempmin='"+Min+"'," +
"tempprefmin='"+PMin+"'," +
"tempprefmax='"+PMax+"'," +
"tempmax='"+Max+"'";
return exitString;
}
}

View File

@ -0,0 +1,339 @@
package org.gcube.dataanalysis.ecoengine.processing;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GENERATOR_WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
public class LocalSimpleSplitGenerator implements Generator {
private GenericConfiguration config;
private ExecutorService executorService;
private int numberOfThreadsToUse;
private boolean threadActivity[];
private boolean stopInterrupt;
private SpatialProbabilityDistributionGeneric distributionModel;
private int processedSpeciesCounter;
private int spaceVectorsNumber;
private List<Object> environmentVectors;
private long lastTime;
private int lastProcessedRecordsNumber;
private int processedRecordsCounter;
private float status;
private int chunksize;
ConcurrentLinkedQueue<String> probabilityBuffer;
//species Objects -> (geographical Object , Probability)
ConcurrentHashMap<Object,Map<Object,Float>> completeDistribution;
public LocalSimpleSplitGenerator(GenericConfiguration config) {
setConfiguration(config);
init();
}
public LocalSimpleSplitGenerator() {
}
@Override
public float getStatus() {
return status;
}
@Override
public String getResourceLoad() {
long tk = System.currentTimeMillis();
double activity = Double.valueOf(processedRecordsCounter - lastProcessedRecordsNumber) * 1000.00 / Double.valueOf(tk - lastTime);
lastTime = tk;
lastProcessedRecordsNumber = processedRecordsCounter;
ResourceLoad rs = new ResourceLoad(tk, activity);
return rs.toString();
}
@Override
public String getResources() {
Resources res = new Resources();
try {
for (int i = 0; i < numberOfThreadsToUse; i++) {
try {
double value = (threadActivity[i]) ? 100.00 : 0.00;
res.addResource("Thread_" + (i + 1), value);
} catch (Exception e1) {
}
}
} catch (Exception e) {
e.printStackTrace();
}
if ((res != null) && (res.list != null))
return HttpRequest.toJSon(res.list).replace("resId", "resID");
else
return "";
}
@Override
public String getLoad() {
long tk = System.currentTimeMillis();
double activity = processedSpeciesCounter;
ResourceLoad rs = new ResourceLoad(tk, activity);
return rs.toString();
}
@Override
public void init() {
AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
stopInterrupt = false;
completeDistribution = new ConcurrentHashMap<Object, Map<Object,Float>>();
try {
initModel();
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().error(e);
}
// probabilityBuffer = new Vector<String>();
probabilityBuffer = new ConcurrentLinkedQueue<String>();
}
private void initModel() throws Exception {
Properties p = GenericConfiguration.getProperties(config.getConfigPath() + GenericConfiguration.algorithmsFile);
String objectclass = p.getProperty(config.getModel());
distributionModel = (SpatialProbabilityDistributionGeneric) Class.forName(objectclass).newInstance();
distributionModel.init(config);
}
@Override
public void setConfiguration(GenericConfiguration config) {
this.config = config;
if (config.getNumberOfResources() == 0)
this.numberOfThreadsToUse = 1;
else
this.numberOfThreadsToUse = config.getNumberOfResources();
}
public void initializeThreads() {
// initialize threads and their activity state
executorService = Executors.newFixedThreadPool(numberOfThreadsToUse);
threadActivity = new boolean[numberOfThreadsToUse];
// initialize to false;
for (int j = 0; j < threadActivity.length; j++) {
threadActivity[j] = false;
}
}
public void shutdown() {
// shutdown threads
executorService.shutdown();
// shutdown connection
stopInterrupt = true;
}
@Override
public void stopProcess() {
stopInterrupt = true;
}
// waits for thread to be free
private void wait4Thread(int index) {
// wait until thread is free
while (threadActivity[index]) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
@Override
public void generate() throws Exception {
// INITIALIZATION
long tstart = System.currentTimeMillis();
try {
AnalysisLogger.getLogger().trace("generate->Take geographical information reference");
// take the area reference vectors
environmentVectors = distributionModel.getGeographicalInfoObjects();
AnalysisLogger.getLogger().trace("generate->Take species references");
List<Object> speciesVectors = distributionModel.getMainInfoObjects();
AnalysisLogger.getLogger().trace("generate->got all information");
// calculate the number of chunks needed
spaceVectorsNumber = environmentVectors.size();
int speciesVectorNumber = speciesVectors.size();
// calculate number of chunks to take into account
chunksize = spaceVectorsNumber / numberOfThreadsToUse;
if (chunksize == 0)
chunksize = 1;
int numOfChunks = spaceVectorsNumber / chunksize;
if ((spaceVectorsNumber % chunksize) != 0)
numOfChunks += 1;
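// Worked example (illustrative numbers): with 1001 geographical vectors and 4 threads,
// chunksize = 1001/4 = 250 and numOfChunks = 1001/250 = 4, plus 1 for the remainder = 5;
// the last chunk covers a single element, because each ThreadCalculator bounds its
// range with Math.min(start + chunksize, spaceVectorsNumber).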
AnalysisLogger.getLogger().trace("generate->Calculation Started with " + numOfChunks + " chunks and " + speciesVectorNumber + " species - chunk size will be "+chunksize);
// initialize threads
initializeThreads();
// END INITIALIZATION
// overall chunks counter
int overallcounter = 0;
processedSpeciesCounter = 0;
// SPECIES CALCULATION
// cycle through the species
for (Object species : speciesVectors) {
// calculation on multiple threads
// thread selection index
int currentThread = 0;
// take time
long computationT0 = System.currentTimeMillis();
// pre process for single species
distributionModel.singleStepPreprocess(species, spaceVectorsNumber);
AnalysisLogger.getLogger().trace("-> species " + distributionModel.getMainInfoID(species) + " - n. " + (processedSpeciesCounter + 1));
// CALCULATION CORE
for (int k = 0; k < numOfChunks; k++) {
// get the starting index
int start = k * chunksize;
// wait for thread to be free
wait4Thread(currentThread);
// start species information calculation on the thread
startNewTCalc(currentThread, species, start);
// increment thread selection index
currentThread++;
// reset current thread index
if (currentThread >= numberOfThreadsToUse) {
currentThread = 0;
}
// report progress
status = ((float) overallcounter / ((float) (speciesVectorNumber * numOfChunks))) * 100f;
if (status == 100)
status = 99f;
// AnalysisLogger.getLogger().trace("STATUS->"+status+"%");
// increment global counter index
overallcounter++;
}
// END OF CALCULATION CORE
// wait for last threads to finish
for (int i = 0; i < numberOfThreadsToUse; i++) {
// free previous calculation
wait4Thread(i);
}
long computationT1 = System.currentTimeMillis();
// flushBuffer();
AnalysisLogger.getLogger().trace("generate->Species Computation Finished in " + (computationT1 - computationT0) + " ms");
// perform overall insert
// insertCriteria();
// increment the count of processed species
processedSpeciesCounter++;
// REPORT ELAPSED TIME
// post process for single species
distributionModel.singleStepPostprocess(species, spaceVectorsNumber);
// if the process was stopped then interrupt the processing
if (stopInterrupt)
break;
}
long computationT2 = System.currentTimeMillis();
// flushInterrupt = true;
AnalysisLogger.getLogger().trace("generate->All Species Computed in " + (computationT2 - tstart) + " ms");
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().error(e);
throw e;
} finally {
// REPORT OVERALL ELAPSED TIME
distributionModel.postProcess();
AnalysisLogger.getLogger().trace("generate-> Storing Probability Distribution");
distributionModel.storeDistribution(completeDistribution);
// shutdown all
shutdown();
long tend = System.currentTimeMillis();
long ttotal = tend - tstart;
AnalysisLogger.getLogger().warn("generate->Distribution Generator->Algorithm finished in: " + ((double) ttotal / (double) 60000) + " min\n");
status = 100f;
}
}
// thread activation
private void startNewTCalc(int index, Object speciesVector, int start) {
threadActivity[index] = true;
ThreadCalculator tc = new ThreadCalculator(index, speciesVector, start);
executorService.submit(tc);
}
// THREAD SECTION
// definition of the Thread
private class ThreadCalculator implements Callable<Integer> {
int threadIndex;
int spaceindex;
Object speciesVector;
public ThreadCalculator(int threadIndex, Object speciesVector, int start) {
this.threadIndex = threadIndex;
this.speciesVector = speciesVector;
this.spaceindex = start;
}
public Integer call() {
AnalysisLogger.getLogger().trace("threadCalculation->" + (threadIndex+1));
int max = Math.min(spaceindex + chunksize, spaceVectorsNumber);
String speciesID = distributionModel.getMainInfoID(speciesVector);
AnalysisLogger.getLogger().trace("threadCalculation-> calculating elements from "+spaceindex+" to " + max +" for species "+speciesID);
Map<Object,Float> geoDistrib = completeDistribution.get(speciesID);
//if the map is null then generate a new map, otherwise update it
if (geoDistrib==null){
geoDistrib = new ConcurrentHashMap<Object, Float>();
completeDistribution.put(speciesVector, geoDistrib);
}
for (int i = spaceindex; i < max; i++) {
float prob = distributionModel.calcProb(speciesVector, environmentVectors.get(i));
// String geographicalID = distributionModel.getGeographicalID(environmentVectors.get(i));
if (prob > 0.1) {
//record the overall probability distribution
geoDistrib.put(environmentVectors.get(i), prob);
}
processedRecordsCounter++;
}
threadActivity[threadIndex] = false;
return 0;
}
}
@Override
public ALG_PROPS[] getSupportedAlgorithms() {
ALG_PROPS[] p = {ALG_PROPS.PHENOMENON_VS_GEOINFO};
return p;
}
@Override
public GENERATOR_WEIGHT getWeight() {
return GENERATOR_WEIGHT.LOWEST;
}
}

View File

@ -0,0 +1,465 @@
package org.gcube.dataanalysis.ecoengine.processing;
import java.util.List;
import java.util.Properties;
import java.util.Queue;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GENERATOR_WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory;
public class LocalSplitGenerator implements Generator {
private GenericConfiguration config;
private ExecutorService executorService;
private int numberOfThreadsToUse;
private boolean threadActivity[];
private SessionFactory dbHibConnection;
private boolean stopInterrupt;
private boolean flushInterrupt;
private SpatialProbabilityDistributionTable distributionModel;
private int processedSpeciesCounter;
private int spaceVectorsNumber;
private List<Object> environmentVectors;
private long lastTime;
private int lastProcessedRecordsNumber;
private int processedRecordsCounter;
private float status;
private int chunksize;
ConcurrentLinkedQueue<String> probabilityBuffer;
String probabilityInsertionStatement = "insert into %1$s (speciesid,csquarecode,probability %ADDEDINFORMATION%) VALUES %2$s";
public LocalSplitGenerator(GenericConfiguration config) {
setConfiguration(config);
init();
}
public LocalSplitGenerator() {
}
@Override
public float getStatus() {
return status;
}
@Override
public String getResourceLoad() {
long tk = System.currentTimeMillis();
double activity = Double.valueOf(processedRecordsCounter - lastProcessedRecordsNumber) * 1000.00 / Double.valueOf(tk - lastTime);
lastTime = tk;
lastProcessedRecordsNumber = processedRecordsCounter;
ResourceLoad rs = new ResourceLoad(tk, activity);
return rs.toString();
}
@Override
public String getResources() {
Resources res = new Resources();
try {
for (int i = 0; i < numberOfThreadsToUse; i++) {
try {
double value = (threadActivity[i]) ? 100.00 : 0.00;
res.addResource("Thread_" + (i + 1), value);
} catch (Exception e1) {
}
}
} catch (Exception e) {
e.printStackTrace();
}
if ((res != null) && (res.list != null))
return HttpRequest.toJSon(res.list).replace("resId", "resID");
else
return "";
}
@Override
public String getLoad() {
long tk = System.currentTimeMillis();
double activity = processedSpeciesCounter;
ResourceLoad rs = new ResourceLoad(tk, activity);
return rs.toString();
}
@Override
public void init() {
AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
stopInterrupt = false;
flushInterrupt = false;
initDBSession();
try {
initModel();
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().error(e);
}
// probabilityBuffer = new Vector<String>();
probabilityBuffer = new ConcurrentLinkedQueue<String>();
String addedinfo = distributionModel.getAdditionalMetaInformation();
if (addedinfo == null)
addedinfo = "";
else
addedinfo = "," + addedinfo.trim();
probabilityInsertionStatement = probabilityInsertionStatement.replace("%ADDEDINFORMATION%", addedinfo);
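// Illustrative result (hypothetical table and values): with no additional metadata the
// %ADDEDINFORMATION% placeholder collapses to "" and a flushed chunk becomes e.g.
//   insert into hspec2012 (speciesid,csquarecode,probability ) VALUES
//   ('Fis-22747','1000:102:3','0.85'),('Fis-22747','1000:102:4','0.5')
// with one parenthesised tuple per buffered row (see DatabaseWriter.writeOnDB below).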
if (!distributionModel.isSynchronousProbabilityWrite()) {
AnalysisLogger.getLogger().trace("init()->insertion scheduler initialized");
// inizialize the scheduler for the insertions
Timer writerScheduler = new Timer();
writerScheduler.schedule(new DatabaseWriter(), 0, GenericConfiguration.refreshResourcesTime);
}
}
private void initModel() throws Exception {
Properties p = GenericConfiguration.getProperties(config.getConfigPath() + GenericConfiguration.algorithmsFile);
String objectclass = p.getProperty(config.getModel());
distributionModel = (SpatialProbabilityDistributionTable) Class.forName(objectclass).newInstance();
distributionModel.init(config, dbHibConnection);
}
@Override
public void setConfiguration(GenericConfiguration config) {
this.config = config;
if (config.getNumberOfResources() == 0)
this.numberOfThreadsToUse = 1;
else
this.numberOfThreadsToUse = config.getNumberOfResources();
}
public void initializeThreads() {
// initialize threads and their activity state
executorService = Executors.newFixedThreadPool(numberOfThreadsToUse);
threadActivity = new boolean[numberOfThreadsToUse];
// initialize to false;
for (int j = 0; j < threadActivity.length; j++) {
threadActivity[j] = false;
}
}
public void initDBSession() {
try {
if ((config != null) && (config.getConfigPath() != null)) {
String defaultDatabaseFile = config.getConfigPath() + GenericConfiguration.defaultConnectionFile;
dbHibConnection = DatabaseFactory.initDBConnection(defaultDatabaseFile, config);
}
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().trace(e);
}
}
private void createTable() throws Exception {
if (config.createTable()) {
try {
DatabaseFactory.executeSQLUpdate("drop table " + config.getDistributionTable(), dbHibConnection);
} catch (Exception e) {
}
DatabaseFactory.executeUpdateNoTransaction(distributionModel.getDistributionTableStatement(), config.getDatabaseDriver(), config.getDatabaseUserName(), config.getDatabasePassword(), config.getDatabaseURL(), true);
AnalysisLogger.getLogger().trace("createTable()->OK!");
}
}
public void shutdown() {
// shutdown threads
executorService.shutdown();
// shutdown connection
stopInterrupt = true;
if (!distributionModel.isSynchronousProbabilityWrite()) {
while (!flushInterrupt) {
try {
Thread.sleep(100);
} catch (Exception e) {
}
}
}
AnalysisLogger.getLogger().trace("CLOSING CONNECTIONS");
dbHibConnection.close();
}
@Override
public void stopProcess() {
stopInterrupt = true;
}
// waits for thread to be free
private void wait4Thread(int index) {
// wait until thread is free
while (threadActivity[index]) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
@Override
public void generate() throws Exception {
// INITIALIZATION
long tstart = System.currentTimeMillis();
try {
AnalysisLogger.getLogger().trace("generate->Check for table creation");
createTable();
AnalysisLogger.getLogger().trace("generate->Take area reference");
// take the area reference vectors
environmentVectors = DatabaseFactory.executeSQLQuery(distributionModel.getGeographicalInfoQuery(), dbHibConnection);
AnalysisLogger.getLogger().trace("generate->Take species reference");
List<Object> speciesVectors = DatabaseFactory.executeSQLQuery(distributionModel.getMainInfoQuery(), dbHibConnection);
AnalysisLogger.getLogger().trace("generate->got all information");
// calculate the number of chunks needed
spaceVectorsNumber = environmentVectors.size();
int speciesVectorNumber = speciesVectors.size();
// calculate number of chunks to take into account
chunksize = spaceVectorsNumber / numberOfThreadsToUse;
if (chunksize == 0)
chunksize = 1;
int numOfChunks = spaceVectorsNumber / chunksize;
if ((spaceVectorsNumber % chunksize) != 0)
numOfChunks += 1;
AnalysisLogger.getLogger().trace("generate->Calculation Started with " + numOfChunks + " chunks and " + speciesVectorNumber + " species");
// initialize threads
initializeThreads();
// END INITIALIZATION
// overall chunks counter
int overallcounter = 0;
processedSpeciesCounter = 0;
// SPECIES CALCULATION
// cycle through the species
for (Object species : speciesVectors) {
// calculation on multiple threads
// thread selection index
int currentThread = 0;
// take time
long computationT0 = System.currentTimeMillis();
// pre process for single species
distributionModel.singleStepPreprocess(species, spaceVectorsNumber);
AnalysisLogger.getLogger().trace("-> species " + distributionModel.getMainInfoID(species) + " - n. " + (processedSpeciesCounter + 1));
// CALCULATION CORE
for (int k = 0; k < numOfChunks; k++) {
// get the starting index
int start = k * chunksize;
// wait for thread to be free
wait4Thread(currentThread);
// start species information calculation on the thread
startNewTCalc(currentThread, species, start);
// increment thread selection index
currentThread++;
// reset current thread index
if (currentThread >= numberOfThreadsToUse) {
currentThread = 0;
}
// report progress
status = ((float) overallcounter / ((float) (speciesVectorNumber * numOfChunks))) * 100f;
if (status == 100)
status = 99f;
// AnalysisLogger.getLogger().trace("STATUS->"+status+"%");
// increment global counter index
overallcounter++;
}
// END OF CALCULATION CORE
// wait for last threads to finish
for (int i = 0; i < numberOfThreadsToUse; i++) {
// free previous calculation
wait4Thread(i);
}
if (distributionModel.isSynchronousProbabilityWrite()) {
probabilityBuffer = (ConcurrentLinkedQueue<String>) distributionModel.filterProbabilitySet((Queue<String>) probabilityBuffer);
DatabaseWriter dbw = new DatabaseWriter();
dbw.flushBuffer();
}
long computationT1 = System.currentTimeMillis();
// flushBuffer();
AnalysisLogger.getLogger().trace("generate->Species Computation Finished in " + (computationT1 - computationT0) + " ms");
// perform overall insert
// insertCriteria();
// increment the count of processed species
processedSpeciesCounter++;
// REPORT ELAPSED TIME
// post process for single species
distributionModel.singleStepPostprocess(species, spaceVectorsNumber);
// if the process was stopped then interrupt the processing
if (stopInterrupt)
break;
}
long computationT2 = System.currentTimeMillis();
// flushInterrupt = true;
AnalysisLogger.getLogger().trace("generate->All Species Computed in " + (computationT2 - tstart) + " ms");
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().error(e);
throw e;
} finally {
// REPORT OVERALL ELAPSED TIME
distributionModel.postProcess();
// shutdown all
shutdown();
long tend = System.currentTimeMillis();
long ttotal = tend - tstart;
AnalysisLogger.getLogger().warn("generate->Distribution Generator->Algorithm finished in: " + ((double) ttotal / (double) 60000) + " min\n");
status = 100f;
}
}
// thread activation
private void startNewTCalc(int index, Object speciesVector, int start) {
threadActivity[index] = true;
ThreadCalculator tc = new ThreadCalculator(index, speciesVector, start);
executorService.submit(tc);
}
// THREAD SECTION
// definition of the Thread
private class ThreadCalculator implements Callable<Integer> {
int threadIndex;
int spaceindex;
Object speciesVector;
public ThreadCalculator(int threadIndex, Object speciesVector, int start) {
this.threadIndex = threadIndex;
this.speciesVector = speciesVector;
this.spaceindex = start;
}
public Integer call() {
// AnalysisLogger.getLogger().trace("threadCalculation->" + (threadIndex+1));
int max = Math.min(spaceindex + chunksize, spaceVectorsNumber);
String speciesID = distributionModel.getMainInfoID(speciesVector);
for (int i = spaceindex; i < max; i++) {
float prob = distributionModel.calcProb(speciesVector, environmentVectors.get(i));
String geographicalID = distributionModel.getGeographicalID(environmentVectors.get(i));
if (prob > 0.1) {
String additionalInformation = distributionModel.getAdditionalInformation(speciesVector, environmentVectors.get(i));
if (additionalInformation == null)
additionalInformation = "";
else if (additionalInformation.length() > 0)
additionalInformation = "," + additionalInformation.trim();
probabilityBuffer.offer("'" + speciesID + "','" + geographicalID + "','" + MathFunctions.roundDecimal(prob, 2) + "'" + additionalInformation);
}
processedRecordsCounter++;
}
threadActivity[threadIndex] = false;
return 0;
}
}
// Database insertion thread
private class DatabaseWriter extends TimerTask {
public DatabaseWriter() {
}
public void run() {
try {
if (stopInterrupt) {
AnalysisLogger.getLogger().trace("\t...flushing on db");
// flush the objects
flushBuffer();
AnalysisLogger.getLogger().trace("\t...finished flushing on db");
flushInterrupt = true;
this.cancel();
} else if ((probabilityBuffer != null) && (probabilityBuffer.size() > GenericConfiguration.chunkSize)) {
// AnalysisLogger.getLogger().trace("\t...writing on db");
writeOnDB(GenericConfiguration.chunkSize);
// AnalysisLogger.getLogger().trace("\t...finished writing on db");
}
} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().error(e);
flushInterrupt = true;
}
}
public void flushBuffer() {
if ((probabilityBuffer != null) && (probabilityBuffer.size() > 0)) {
while (probabilityBuffer.size() > GenericConfiguration.chunkSize)
writeOnDB(GenericConfiguration.chunkSize);
writeOnDB(probabilityBuffer.size());
}
}
private void writeOnDB(int endIndex) {
if (endIndex > 0) {
StringBuffer sb = new StringBuffer();
// AnalysisLogger.getLogger().trace("writeOnDB()->PROBABILITIES BUFFER SIZE DELETION");
for (int i = 0; i < endIndex; i++) {
sb.append("(" + distributionModel.filterProbabiltyRow(probabilityBuffer.poll()) + ")");
if (i < endIndex - 1) {
sb.append(",");
}
}
String insertionString = String.format(probabilityInsertionStatement, config.getDistributionTable(), sb.toString());
try {
// AnalysisLogger.getLogger().debug("->"+insertionString);
DatabaseFactory.executeSQLUpdate(insertionString, dbHibConnection);
} catch (Exception e) {
e.printStackTrace();
}
AnalysisLogger.getLogger().trace("writeOnDB()->PROBABILITIES BUFFER REMAINING:" + probabilityBuffer.size());
sb = null;
}
}
}
@Override
public ALG_PROPS[] getSupportedAlgorithms() {
ALG_PROPS[] p = {ALG_PROPS.SPECIES_VS_CSQUARE_FROM_DATABASE};
return p;
}
@Override
public GENERATOR_WEIGHT getWeight() {
return GENERATOR_WEIGHT.LOWEST;
}
}
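The generator above fans rows out of a pool of worker Callables into a shared ConcurrentLinkedQueue, while a TimerTask periodically drains the queue in fixed-size chunks. A minimal, self-contained sketch of that producer/consumer pattern (the class name, row format and chunk sizes here are illustrative, not part of the engine):

import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
public class BufferedWriterSketch {
	static final ConcurrentLinkedQueue<String> buffer = new ConcurrentLinkedQueue<String>();
	public static void main(String[] args) throws Exception {
		ExecutorService pool = Executors.newFixedThreadPool(4);
		for (int t = 0; t < 4; t++) {
			final int id = t;
			//producers: each worker offers its probability rows to the shared queue
			pool.submit(new Callable<Integer>() {
				public Integer call() {
					for (int i = 0; i < 1000; i++)
						buffer.offer("'species" + id + "','square" + i + "','0.5'");
					return 0;
				}
			});
		}
		pool.shutdown();
		//consumer: a timer drains the queue in chunks, standing in for the
		//batched INSERT performed by DatabaseWriter.writeOnDB
		Timer timer = new Timer();
		timer.schedule(new TimerTask() {
			public void run() {
				int chunk = Math.min(500, buffer.size());
				for (int i = 0; i < chunk; i++)
					buffer.poll();
			}
		}, 0, 100);
		pool.awaitTermination(1, TimeUnit.MINUTES);
		while (!buffer.isEmpty())
			Thread.sleep(50); //let the writer flush the tail, as flushBuffer does
		timer.cancel();
	}
}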


@ -0,0 +1,181 @@
package org.gcube.dataanalysis.ecoengine.processing;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GENERATOR_WEIGHT;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.connectors.RemoteGenerationManager;
import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecInputObject;
import org.gcube.dataanalysis.ecoengine.connectors.RemoteHspecOutputObject;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
public class RainyCloudGenerator implements Generator{
GenericConfiguration config;
private boolean interruptProcessing;
RemoteGenerationManager remoteGenerationManager;
RemoteHspecInputObject rhio;
public RainyCloudGenerator(GenericConfiguration config) {
setConfiguration(config);
init();
}
public RainyCloudGenerator() {
}
@Override
public float getStatus() {
RemoteHspecOutputObject oo = remoteGenerationManager.retrieveCompleteStatus();
// if (oo.status.equals("DONE")||oo.status.equals("ERROR"))
if (oo.status.equals("DONE"))
{
stopProcess();
return 100f;
}
else {
float remoteStatus =(float)remoteGenerationManager.retrieveCompletion();
return (remoteStatus==100)?99:remoteStatus;
}
}
@Override
public void init() {
AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
interruptProcessing = false;
rhio = new RemoteHspecInputObject();
rhio.userName = config.getServiceUserName();
rhio.environment = config.getRemoteEnvironment();
rhio.configuration = config.getGeneralProperties();
rhio.generativeModel = config.getModel();
String jdbcUrl = config.getDatabaseURL();
String userName = config.getDatabaseUserName();
String password = config.getDatabasePassword();
jdbcUrl += ";username="+userName+";password="+password;
//jdbc:sqlserver://localhost;user=MyUserName;password=*****;
rhio.hcafTableName.tableName = config.getCsquarecodesTable();
rhio.hcafTableName.jdbcUrl=jdbcUrl;
rhio.hspecDestinationTableName.tableName = config.getDistributionTable();
rhio.hspecDestinationTableName.jdbcUrl=jdbcUrl;
rhio.hspenTableName.tableName = config.getEnvelopeTable();
rhio.hspenTableName.jdbcUrl=jdbcUrl;
rhio.occurrenceCellsTable.tableName = "maxminlat_"+config.getEnvelopeTable();
rhio.occurrenceCellsTable.jdbcUrl=jdbcUrl;
rhio.nWorkers = config.getNumberOfResources();
if (config.getModel().contains("2050"))
rhio.is2050 = true;
else
rhio.is2050 = false;
if (config.getModel().contains("NATIVE"))
rhio.isNativeGeneration = true;
else
rhio.isNativeGeneration = false;
//create and call the remote generator
remoteGenerationManager = new RemoteGenerationManager(config.getRemoteCalculator());
}
@Override
public void setConfiguration(GenericConfiguration config) {
this.config = config;
}
@Override
public void shutdown() {
}
@Override
public void stopProcess() {
interruptProcessing = true;
}
@Override
public String getResourceLoad() {
String returnString = "[]";
try{
RemoteHspecOutputObject rhoo = remoteGenerationManager.retrieveCompleteStatus();
if (rhoo.metrics.throughput.size()>1)
{
ResourceLoad rs = new ResourceLoad(rhoo.metrics.throughput.get(0),rhoo.metrics.throughput.get(1));
returnString = rs.toString();
}
}catch(Exception e){}
return returnString;
}
@Override
public String getResources() {
Resources res = new Resources();
try{
RemoteHspecOutputObject rhoo = remoteGenerationManager.retrieveCompleteStatus();
res.list = rhoo.metrics.load;
}catch(Exception e){}
if ((res!=null) && (res.list!=null))
return HttpRequest.toJSon(res.list).replace("resId", "resID");
else
return "[]";
}
@Override
public String getLoad() {
RemoteHspecOutputObject rhoo = remoteGenerationManager.retrieveCompleteStatus();
String returnString = "[]";
if ((rhoo.metrics.throughput!=null)&&(rhoo.metrics.throughput.size()>1))
{
ResourceLoad rs = new ResourceLoad(rhoo.metrics.throughput.get(0),rhoo.metrics.throughput.get(1));
returnString = rs.toString();
}
return returnString;
}
@Override
public void generate() throws Exception {
try{
remoteGenerationManager.submitJob(rhio);
}catch(Exception e){
e.printStackTrace();
}
AnalysisLogger.getLogger().trace("REMOTE PROCESSING STARTED");
boolean finish = false;
while (!finish && !interruptProcessing){
float status = getStatus();
// AnalysisLogger.getLogger().trace("Status "+status);
if (status==100) finish = true;
Thread.sleep(500);
}
AnalysisLogger.getLogger().trace("REMOTE PROCESSING ENDED");
}
@Override
public ALG_PROPS[] getSupportedAlgorithms() {
ALG_PROPS[] p = {ALG_PROPS.SPECIES_VS_CSQUARE_REMOTE_FROM_DATABASE};
return p;
}
@Override
public GENERATOR_WEIGHT getWeight() {
return GENERATOR_WEIGHT.HIGH;
}
}
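getStatus() deliberately reports at most 99% while the remote job is still running, so callers only observe completion once the remote side answers DONE. A hedged usage sketch (the configuration set-up is omitted; it is shown in the regression tests further below):

//illustrative driver; config must carry the remote calculator URL,
//database coordinates and model name
public static void runRemote(GenericConfiguration config) throws Exception {
	RainyCloudGenerator generator = new RainyCloudGenerator(config);
	generator.generate(); //submits the job, then polls every 500 ms until DONE or stopProcess()
}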


@ -0,0 +1,181 @@
package org.gcube.dataanalysis.ecoengine.processing.factories;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import java.util.ServiceLoader;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistribution;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
public class GeneratorsFactory {
public static void main(String[] args) throws Exception {
ServiceLoader<SpatialProbabilityDistributionGeneric> sp = ServiceLoader.load(SpatialProbabilityDistributionGeneric.class);
for (SpatialProbabilityDistributionGeneric distrib:sp){
System.out.println(distrib.getName());
}
/*
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_remote_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
config.setCreateTable(true);
config.setNumberOfResources(20);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://146.48.87.169/testdb");
config.setRemoteCalculator("http://node1.d.venusc.research-infrastructures.eu:5942/api/");
config.setServiceUserName("gianpaolo.coro");
config.setRemoteEnvironment("windows azure");
HashMap<String, String> properties = new HashMap<String, String>();
properties.put("property1", "value1");
properties.put("property2", "value2");
config.setModel("TEST");
List<Generator> gens = GeneratorsFactory.getGenerators(config);
for (int i=0;i<gens.size();i++){
System.out.println(gens.get(i).getClass());
gens.get(i).init();
}
*/
}
public static void main1(String[] args) throws Exception {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_remote_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
config.setCreateTable(true);
config.setNumberOfResources(20);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setRemoteCalculator("http://node1.d.venusc.research-infrastructures.eu:5942/api/");
config.setServiceUserName("gianpaolo.coro");
config.setRemoteEnvironment("windows azure");
HashMap<String, String> properties = new HashMap<String, String>();
properties.put("property1", "value1");
properties.put("property2", "value2");
config.setModel("AQUAMAPS_SUITABLE");
config.setGenerator("LOCAL_WITH_DATABASE");
Generator gen = GeneratorsFactory.getGenerator(config);
System.out.println(gen.getClass());
}
public static Generator getGenerator(GenericConfiguration config) throws Exception {
//modify this class in order to select the right generator algorithm
try {
//initialize the logger
AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
//take the algorithm
String algorithm = config.getGenerator();
if (algorithm == null) throw new Exception("GENERATOR NOT SPECIFIED");
//take the algorithms list
Properties p = GenericConfiguration.getProperties(config.getConfigPath() + GenericConfiguration.generatorsFile);
String algorithmclass = p.getProperty(algorithm);
Object algclass = Class.forName(algorithmclass).newInstance();
Generator g = (Generator) algclass;
g.setConfiguration(config);
g.init();
return g;
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
public static List<Generator> getGenerators(GenericConfiguration config) throws Exception {
//modify this class in order to manage generator weights and to match algorithms against generators
List<Generator> generators = new ArrayList<Generator>();
try {
//initialize the logger
AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
//take the algorithm
String algorithm = config.getModel();
//take the algorithms list
Properties p = GenericConfiguration.getProperties(config.getConfigPath() + GenericConfiguration.algorithmsFile);
String algorithmclass = p.getProperty(algorithm);
Object algclass = Class.forName(algorithmclass).newInstance();
//if the algorithm is a generator itself then add it directly
if (algclass instanceof Generator){
Generator g = (Generator) algclass;
g.setConfiguration(config);
generators.add(g);
}
else
{
SpatialProbabilityDistribution sp = (SpatialProbabilityDistribution) algclass;
//take alg's properties
ALG_PROPS[] algp = sp.getProperties();
//take all generators
Properties pg = GenericConfiguration.getProperties(config.getConfigPath() + GenericConfiguration.generatorsFile);
//investigate on possible suitable generators
for (Object generatorName:pg.values()){
Generator gen = (Generator)Class.forName((String)generatorName).newInstance();
ALG_PROPS[] supportedAlgs = gen.getSupportedAlgorithms();
boolean genSuitable = false;
for (ALG_PROPS prop:algp){
for (ALG_PROPS gprop:supportedAlgs){
if (gprop == prop){
genSuitable = true;
break;
}
}
}
//if a suitable generator was found, add it at the right place in the list
if (genSuitable){
gen.setConfiguration(config);
addGenerator2List(generators,gen);
}
}
}
return generators;
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
//adds a generator to a sorted generators list
public static void addGenerator2List(List<Generator> generators, Generator generator){
int i=0;
boolean inserted = false;
for (Generator g: generators){
if (g.getWeight().compareTo(generator.getWeight())>0){
generators.add(i, generator);
inserted = true;
break;
}
i++;
}
if (!inserted)
generators.add(generator);
}
}
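Both factory methods resolve names to classes through plain properties files: the generators file maps a generator key to its implementation class, the algorithms file maps a model name to a distribution class, and getGenerators returns the suitable generators ordered by GENERATOR_WEIGHT via addGenerator2List. A hypothetical generators file, for illustration only (the LOCAL_WITH_DATABASE key appears in the configuration examples in this commit; the LocalDatabaseGenerator class name and the REMOTE_RAINYCLOUD key are invented, while RainyCloudGenerator is the class defined above):

LOCAL_WITH_DATABASE=org.gcube.dataanalysis.ecoengine.processing.LocalDatabaseGenerator
REMOTE_RAINYCLOUD=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator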


@ -0,0 +1,30 @@
package org.gcube.dataanalysis.ecoengine.processing.factories;
import java.util.Properties;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Model;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
public class ModelersFactory {
public static Modeler getGenerator(GenericConfiguration config) throws Exception {
try {
AnalysisLogger.setLogger(config.getConfigPath() + GenericConfiguration.defaultLoggerFile);
Properties p = GenericConfiguration.getProperties(config.getConfigPath() + GenericConfiguration.modelsFile);
String objectclass = p.getProperty(config.getModel() + "_MODELER");
Modeler g = (Modeler) Class.forName(objectclass).newInstance();
String modelclass = p.getProperty(config.getModel());
Model m = (Model) Class.forName(modelclass).newInstance();
g.setmodel(m);
return g;
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
}
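ModelersFactory follows the same convention with two keys per model: the bare model name resolves to the Model class, and the name suffixed with _MODELER resolves to the Modeler that drives it. A hypothetical pair of entries (both key and class names invented for illustration):

AQUAMAPS_HSPEN=org.gcube.dataanalysis.ecoengine.models.ModelAquamaps
AQUAMAPS_HSPEN_MODELER=org.gcube.dataanalysis.ecoengine.modeling.SimpleModeler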


@ -0,0 +1,886 @@
package org.gcube.dataanalysis.ecoengine.spatialdistributions;
import java.util.HashMap;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
public class AquamapsAlgorithmCore {
//gets the initialization value for a string object
public static String getElement(Object[] featuresVector,int index){
if (featuresVector[index] != null) return ""+featuresVector[index];
else return null;
}
//gets the initialization value for a numeric object
public static double getNumber(Object[] featuresVector,int index){
double number = -9999;
try{
number = Double.parseDouble(""+featuresVector[index]);
}catch(Exception e){}
return number;
}
//get BB for a Species
public HashMap<String,String> getBoundingBoxInfo (String $paramData_NMostLat,String $paramData_SMostLat,String $paramData_WMostLong, String $paramData_EMostLong, Object[] maxMinLat,String $type){
// START N-S Bounding Box adjustment values - copied from procBB_2050_next.php
String $southern_hemisphere_adjusted = "n";
String $northern_hemisphere_adjusted = "n";
String $pass_NS = "n";
String $pass_N = "n";
String $pass_S = "n";
HashMap<String,String> boundingInfo = new HashMap<String, String>();
// START E-W Bounding Box adjustment values if 2050 map data - copied from procBB_2050.php
if (($type!=null) && $type.equals("2050")) {
/*extends the bounding box by 10 degrees in each direction when a full bounding box exists (N,S,W,E values available)*/
double $Wmost = ($paramData_WMostLong==null)? -9999: Double.parseDouble($paramData_WMostLong);
double $Emost = ($paramData_EMostLong==null)? -9999: Double.parseDouble($paramData_EMostLong);
double $n = 0;
if ( (($paramData_NMostLat!=null)&&($paramData_NMostLat.length()>0)) &&
(($paramData_SMostLat!=null)&&($paramData_SMostLat.length()>0)) &&
(($paramData_WMostLong!=null)&&($paramData_WMostLong.length()>0)) &&
(($paramData_EMostLong!=null)&&($paramData_EMostLong.length()>0))
)
{
$paramData_NMostLat = ""+(Double.parseDouble($paramData_NMostLat) + 10); if(Double.parseDouble($paramData_NMostLat) > 90) {$paramData_NMostLat = "90";}
$paramData_SMostLat = ""+(Double.parseDouble($paramData_SMostLat) - 10); if(Double.parseDouble($paramData_SMostLat) < -90) {$paramData_SMostLat = "-90";}
if ( $paramData_WMostLong.equals("-180") &&
$paramData_EMostLong.equals("180")
) //case is circumglobal
{ }
else
{
$paramData_WMostLong = ""+(Double.parseDouble($paramData_WMostLong) - 10);
if(Double.parseDouble($paramData_WMostLong) < 0)
{
if(Double.parseDouble($paramData_WMostLong) < -180) {$n = (Double.parseDouble($paramData_WMostLong) + 180) * (-1); $paramData_WMostLong = ""+(180 - $n);}
}
else
{
if(Double.parseDouble($paramData_WMostLong) > 180) {$n = Double.parseDouble($paramData_WMostLong) - 180; $paramData_WMostLong = ""+(-180 + $n);}
}
$paramData_EMostLong = ""+(Double.parseDouble($paramData_EMostLong) + 10);
if(Double.parseDouble($paramData_EMostLong) < 0)
{
if(Double.parseDouble($paramData_EMostLong) < -180) {
$n = (Double.parseDouble($paramData_EMostLong) + 180) * (-1); $paramData_EMostLong = ""+(180 - $n);
}
}
else
{
if(Double.parseDouble($paramData_EMostLong) > 180) {
$n = Double.parseDouble($paramData_EMostLong) - 180; $paramData_EMostLong = ""+(-180 + $n);
}
}
}
//start kathy's routine {to check if overlapping longitude}
double $diff = Double.parseDouble($paramData_WMostLong) - Double.parseDouble($paramData_EMostLong); //used by case 1 & 2
//case 1
if ( ($Wmost < 0) && ($Emost < 0) && ($Wmost >= $Emost) && (($diff <= 0) && ($diff >= -20)) ) {
$paramData_WMostLong = "-180"; $paramData_EMostLong = "180";
}
//case 2
if ( ($Wmost > 0) && ($Emost > 0) && ($Wmost >= $Emost) && (($diff <= 0) && ($diff >= -20)) ) {
$paramData_WMostLong = "-180"; $paramData_EMostLong = "180";
}
//case 3
if ( ($Wmost <= -170) && ($Emost >= 170)) {
$paramData_WMostLong = "-180"; $paramData_EMostLong = "180"; }
//case 4
if ( ($Wmost >= 0 && $Wmost <= 10) && ($Emost <= 0 && $Emost >= -10)) {
$paramData_WMostLong = "-180"; $paramData_EMostLong = "180";
}
//end kathy's routine
}//if all bb are filled
else {
//no bounding box
}
}
// END E-W Bounding Box adjustment values if 2050 map data
if ( (($paramData_NMostLat==null)||($paramData_NMostLat.length()==0)) ||
(($paramData_SMostLat==null)||($paramData_SMostLat.length()==0)) ||
(($paramData_WMostLong==null)||($paramData_WMostLong.length()==0)) ||
(($paramData_EMostLong==null)||($paramData_EMostLong.length()==0))
)
{
if (($paramData_NMostLat!=null)&&($paramData_NMostLat.length()>0) && ($paramData_SMostLat!=null) && ($paramData_SMostLat.length()>0)){$pass_NS = "y";}
else if (($paramData_NMostLat!=null)&& ($paramData_NMostLat.length()>0)){$pass_N = "y";}
else if (($paramData_SMostLat!=null) && ($paramData_SMostLat.length()>0)){$pass_S = "y";}
else{
//String $qry="SELECT DISTINCT Max(hcaf.CenterLat) AS maxCLat, Min(hcaf.CenterLat) AS minCLat FROM $oc_var INNER JOIN HCAF ON $oc_var.CsquareCode = HCAF.CsquareCode WHERE (((hcaf.OceanArea > 0))) AND $oc_var.SpeciesID = '$SpeciesID' AND $oc_var.GoodCell <> 0";
double $maxCLat = 0;
double $minCLat = 0;
try{
$maxCLat = Double.parseDouble(""+maxMinLat[0]);
$minCLat = Double.parseDouble(""+maxMinLat[1]);
}catch(Exception ex ){}
if ($minCLat > 10) {
$paramData_SMostLat="0";
$southern_hemisphere_adjusted = "y";
}
else if ($maxCLat < -10) {
$paramData_NMostLat="0";
$northern_hemisphere_adjusted = "y";
}
}
}
// END Bounding Box adjustment values
boundingInfo.put("$southern_hemisphere_adjusted", $southern_hemisphere_adjusted);
boundingInfo.put("$northern_hemisphere_adjusted", $northern_hemisphere_adjusted);
boundingInfo.put("$pass_NS", $pass_NS);
boundingInfo.put("$pass_N", $pass_N);
boundingInfo.put("$pass_S", $pass_S);
boundingInfo.put("$paramData_NMostLat", $paramData_NMostLat);
boundingInfo.put("$paramData_SMostLat", $paramData_SMostLat);
boundingInfo.put("$paramData_WMostLong", $paramData_WMostLong);
boundingInfo.put("$paramData_EMostLong", $paramData_EMostLong);
return boundingInfo;
}
//calculate BB and FAOAreas flags for (species,csquare)
public HashMap<String,Integer> calculateBoundingBox(String csquarecode,String $pass_NS,String $pass_N,String $pass_S,
String $CenterLat,String $CenterLong,String $FAOAreaM,
String $paramData_NMostLat, String $paramData_SMostLat,String $paramData_WMostLong,String $paramData_EMostLong,String $paramData_FAOAreas,
String $northern_hemisphere_adjusted, String $southern_hemisphere_adjusted){
// if (csquarecode.equals("7112:123:4"))
// System.out.println();
// Get values for $InFAO and $InBox; these will be used as FILTERS
int $InFAO=0;
int $InBox=0;
int $InLong;
String $tmpstr="";
HashMap<String,Integer> AreaInfo = new HashMap<String, Integer>();
// START $InBox
//start adjustment when an N, S, or N-S limit exists
if($pass_NS.equals("y"))
{ if (Double.parseDouble($CenterLat) >= Double.parseDouble($paramData_SMostLat)
&&
Double.parseDouble($CenterLat) <= Double.parseDouble($paramData_NMostLat)) {$InBox = 1; }
}else if($pass_N.equals("y")){
if (Double.parseDouble($CenterLat) <= Double.parseDouble($paramData_NMostLat)) {$InBox = 1;}
}else if($pass_S.equals("y")){
if (Double.parseDouble($CenterLat) >= Double.parseDouble($paramData_SMostLat)) {$InBox = 1; }
}else{
//start hemispheres using good cells
if($southern_hemisphere_adjusted.equals("y")){
if(Double.parseDouble($CenterLat) > 0)
{ $InBox = 1; }
}else if($northern_hemisphere_adjusted.equals("y")){
if(Double.parseDouble($CenterLat) < 0) {$InBox = 1;}
}else{
$InBox = 0;
}
//end hemispheres using good cells
}
if ( ($paramData_NMostLat!= null && $paramData_NMostLat.length()>0) &&
($paramData_SMostLat!= null && $paramData_SMostLat.length()>0) &&
($paramData_WMostLong!= null && $paramData_WMostLong.length()>0) &&
($paramData_EMostLong!= null && $paramData_EMostLong.length()>0)
)
{
//'handle longitude crossing the date line
if (Double.parseDouble($paramData_WMostLong) > Double.parseDouble($paramData_EMostLong))
{
if (
(Double.parseDouble($CenterLong) >= Double.parseDouble($paramData_EMostLong)) &&
(Double.parseDouble($CenterLong) <= Double.parseDouble($paramData_WMostLong))
)
{$InLong = 0;}
else
{$InLong = 1;}
}
else
{
if (
(Double.parseDouble($CenterLong) >= Double.parseDouble($paramData_WMostLong)) &&
(Double.parseDouble($CenterLong) <= Double.parseDouble($paramData_EMostLong))
)
{$InLong = 1;}
else
{$InLong = 0;}
}
if (
(Double.parseDouble($CenterLat) >= Double.parseDouble($paramData_SMostLat)) &&
(Double.parseDouble($CenterLat) <= Double.parseDouble($paramData_NMostLat)) &&
$InLong == 1
)
{
$InBox = 1;
}
else
{
$InBox = 0;
}
}
//end new from skit nov 2006
//START $InFAO
//'check FAO area
if ( $FAOAreaM == null || $FAOAreaM.length() == 0 )
{
$InFAO = 0;
}
else
{
$tmpstr = $FAOAreaM;
if ( ($paramData_FAOAreas!=null) && $paramData_FAOAreas.contains($tmpstr))
{
$InFAO = 1;
}
else
{
$InFAO = 0;
}
}
AreaInfo.put("$InBox",$InBox);
AreaInfo.put("$InFAO",$InFAO);
return AreaInfo;
}
//help functions
private boolean inside(String searched,String containing){
if (containing!=null)
return containing.contains(searched);
else
return false;
}
//FAO Areas extension
public String procFAO_2050(String $temp){
if ( inside("41",$temp) ||
inside("47",$temp) ) {if(!inside("48",$temp)){$temp += ", 48";}}
if ( inside("51",$temp) ||
inside("57",$temp) ) {if(!inside("58",$temp)){$temp += ", 58";}}
if ( inside("81",$temp) ||
inside("87",$temp) ) {if(!inside("88",$temp)){$temp += ", 88";}
}
if ( inside("67",$temp) ) {if(!inside("18",$temp)){$temp += ", 18";}}
if ( inside("31",$temp) ) {
if(!inside("21",$temp)){$temp += ", 21";}
if(!inside("41",$temp)){$temp += ", 41";}
}
if ( inside("34",$temp) ) {
if(!inside("27",$temp)){$temp += ", 27";}
if(!inside("47",$temp)){$temp += ", 47";}
}
if ( inside("71",$temp) ) {
if(!inside("61",$temp)){$temp += ", 61";}
if(!inside("81",$temp)){$temp += ", 81";}
}
if ( inside("77",$temp) ) {
if(!inside("67",$temp)){$temp += ", 67";}
if(!inside("87",$temp)){$temp += ", 87";}
}
return $temp;
}
//Probability calculation - initializes and calculates
public double getSpeciesProb(Object[] speciesResults,Object[] csquarecodeInfo){
String depthmin = getElement(speciesResults,0);
int depthmean = 0;
try{
depthmean = Integer.parseInt(""+speciesResults[1]);
}catch(Exception e){
}
String depthprefmin = getElement(speciesResults,2);
String pelagic = getElement(speciesResults,3);
String depthprefmax = getElement(speciesResults,4);
String depthmax = getElement(speciesResults,5);
String tempmin = getElement(speciesResults,6);
String layer = getElement(speciesResults,7);
String tempprefmin = getElement(speciesResults,8);
String tempprefmax = getElement(speciesResults,9);
String tempmax = getElement(speciesResults,10);
String salinitymin = getElement(speciesResults,11);
String salinityprefmin = getElement(speciesResults,12);
String salinityprefmax = getElement(speciesResults,13);
String salinitymax = getElement(speciesResults,14);
String primprodmin = getElement(speciesResults,15);
String primprodprefmin = getElement(speciesResults,16);
String primprodprefmax = getElement(speciesResults,17);
String primprodmax = getElement(speciesResults,18);
String iceconmin = getElement(speciesResults,19);
String iceconprefmin = getElement(speciesResults,20);
String iceconprefmax = getElement(speciesResults,21);
String iceconmax = getElement(speciesResults,22);
String landdistyn = getElement(speciesResults,23);
String landdistmin = getElement(speciesResults,24);
String landdistprefmin = getElement(speciesResults,25);
String landdistprefmax = getElement(speciesResults,26);
String landdistmax = getElement(speciesResults,27);
String csquarecode = getElement(csquarecodeInfo,0);
double depthmeancsquare = getNumber(csquarecodeInfo, 1);
double depthmaxcsquare = getNumber(csquarecodeInfo, 2);
double depthmincsquare = getNumber(csquarecodeInfo, 3);
double sstanmeancsquare = getNumber(csquarecodeInfo, 4);
double sbtanmeancsquare = getNumber(csquarecodeInfo, 5);
double salinitymeancsquare = getNumber(csquarecodeInfo, 6);
double salinitybmeancsquare = getNumber(csquarecodeInfo, 7);
double primprodmeancsquare = getNumber(csquarecodeInfo, 8);
String iceconanncsquare = csquarecodeInfo[9]==null? "":(""+csquarecodeInfo[9]);
double landdist = getNumber(csquarecodeInfo, 10);
String vprovider = "";
if (depthmean == 1) {
vprovider = "MM";
}
else {
vprovider = "suitable";
}
double prob = 0;
try{
// long t0 = System.currentTimeMillis();
prob = calcProb(depthmin, depthmean, depthmeancsquare, depthmaxcsquare, depthmincsquare, depthprefmin, pelagic, vprovider,
depthprefmax, depthmax, tempmin, layer, sstanmeancsquare, sbtanmeancsquare, tempprefmin, tempprefmax, tempmax,
salinitymin, salinitymeancsquare, salinitybmeancsquare, salinityprefmin, salinityprefmax, salinitymax,
primprodmin, primprodmeancsquare, primprodprefmin, primprodprefmax, primprodmax,
iceconmin, iceconanncsquare, iceconprefmin, iceconprefmax, iceconmax,
landdistyn, landdist, landdistmin, landdistprefmin, landdistprefmax, landdistmax);
// long t1 = System.currentTimeMillis();
// System.out.println("Time "+ (t0-t1)+"ms");
// avgTime = MathFunctions.incrementPerc(avgTime, t1-t0, totalcounter);
// totalcounter++;
// System.out.println("Average Time "+ avgTime +"ms");
}catch (Exception e){
// AnalysisLogger.getLogger().debug("Impossible to calculate probability: inconsistent values in the hcaf or hspen");
}
return prob;
}
float avgTime = 0;
int totalcounter = 0;
public AquamapsAlgorithmCore(){
}
//calculates probability
public double calcProb(String $paramData_DepthMin, int $paramData_MeanDepth, double $DepthMean, double $DepthMax, double $DepthMin, String $paramData_DepthPrefMin,
String $paramData_Pelagic, String $vprovider, String $paramData_DepthPrefMax, String $paramData_DepthMax,
String $paramData_SSTMin, String $paramData_layer, double $SSTAnMean, double $SBTAnMean, String $paramData_SSTPrefMin, String $paramData_SSTPrefMax, String $paramData_SSTMax,
String $paramData_SalinityMin, double $SalinityMean, double $SalinityBMean, String $paramData_SalinityPrefMin, String $paramData_SalinityPrefMax, String $paramData_SalinityMax,
String $paramData_PrimProdMin, double $PrimProdMean, String $paramData_PrimProdPrefMin, String $paramData_PrimProdPrefMax, String $paramData_PrimProdMax,
String $paramData_IceConMin, String $IceConAnn, String $paramData_IceConPrefMin, String $paramData_IceConPrefMax, String $paramData_IceConMax,
String $paramData_LandDistYN, double $LandDist, String $paramData_LandDistMin, String $paramData_LandDistPrefMin, String $paramData_LandDistPrefMax, String $paramData_LandDistMax ){
Double dparamData_DepthMin = null;
Double dparamData_DepthPrefMin = null;
Double dparamData_DepthPrefMax = null;
Double dparamData_DepthMax = null;
Double dparamData_SSTMin = null;
Double dparamData_SSTPrefMin = null;
Double dparamData_SSTMax = null;
Double dparamData_SSTPrefMax = null;
Double dparamData_SalinityMin = null;
Double dparamData_SalinityPrefMin = null;
Double dparamData_SalinityPrefMax = null;
Double dparamData_SalinityMax = null;
Double dparamData_PrimProdMax = null;
Double dparamData_PrimProdPrefMax = null;
Double dparamData_PrimProdPrefMin = null;
Double dparamData_PrimProdMin = null;
Double dparamData_IceConMax = null;
Double dparamData_IceConPrefMax = null;
Double dparamData_IceConPrefMin = null;
Double dparamData_IceConMin = null;
Double dIceConAnn = null;
Double dparamData_LandDistMax = null;
Double dparamData_LandDistPrefMax = null;
Double dparamData_LandDistPrefMin = null;
Double dparamData_LandDistMin = null;
//pre parsing of some variables
try{
dparamData_DepthMin = Double.parseDouble($paramData_DepthMin);
dparamData_DepthPrefMin = Double.parseDouble($paramData_DepthPrefMin);
dparamData_DepthPrefMax = Double.parseDouble($paramData_DepthPrefMax);
dparamData_DepthMax = Double.parseDouble($paramData_DepthMax);
dparamData_SSTMin = Double.parseDouble($paramData_SSTMin);
dparamData_SSTPrefMin = Double.parseDouble($paramData_SSTPrefMin);
dparamData_SSTMax = Double.parseDouble($paramData_SSTMax);
dparamData_SSTPrefMax = Double.parseDouble($paramData_SSTPrefMax);
dparamData_SalinityMin = Double.parseDouble($paramData_SalinityMin);
dparamData_SalinityPrefMin = Double.parseDouble($paramData_SalinityPrefMin);
dparamData_SalinityPrefMax = Double.parseDouble($paramData_SalinityPrefMax);
dparamData_SalinityMax = Double.parseDouble($paramData_SalinityMax);
dparamData_PrimProdMax = Double.parseDouble($paramData_PrimProdMax);
dparamData_PrimProdPrefMax = Double.parseDouble($paramData_PrimProdPrefMax);
dparamData_PrimProdPrefMin = Double.parseDouble($paramData_PrimProdPrefMin );
dparamData_PrimProdMin = Double.parseDouble($paramData_PrimProdMin);
dparamData_IceConMax = Double.parseDouble($paramData_IceConMax);
dparamData_IceConPrefMax = Double.parseDouble($paramData_IceConPrefMax);
dparamData_IceConPrefMin = Double.parseDouble($paramData_IceConPrefMin);
dparamData_IceConMin = Double.parseDouble($paramData_IceConMin);
dIceConAnn = Double.parseDouble($IceConAnn);
dparamData_LandDistMax = Double.parseDouble($paramData_LandDistMax);
dparamData_LandDistPrefMax = Double.parseDouble($paramData_LandDistPrefMax);
dparamData_LandDistPrefMin = Double.parseDouble($paramData_LandDistPrefMin);
dparamData_LandDistMin = Double.parseDouble($paramData_LandDistMin);
}catch(Exception ex){}
//end preparsing
// STEP 3 start of COMPUTATIONS - testing fields from HSPEN matched against all HCAF records
//initialize factors to compute pTotal (PROBABILITY): pDepth, pSST, pSalin, pIce, pLand, pPProd
double $pDepth=0;
double $pSST=0;
double $pSalin=0;
double $pIce=0;
double $pLand=0; // previously set to 1 when the computation was omitted (per Skit); now handles expert-reviewed HSPEN cases where distance to land is included
double $pPProd=0;
double $pTotal=0;
double $paramfld = 0;
double $paramfld1 = 0;
//##################################################################################################
// DEPTH
//##################################################################################################
if ($paramData_DepthMin == null ) {
$pDepth = 1;
}
else {
if ($paramData_MeanDepth == 1){
$paramfld = $DepthMean;
$paramfld1 = $DepthMean;
}
else {
$paramfld = $DepthMax;
$paramfld1 = $DepthMin;
}
$pDepth = -1;
if ($paramfld == -9999 || $paramData_DepthMin.equals("") ){$pDepth = 1;}
else
{
if ($paramData_DepthMin.equals("") ||($paramfld < dparamData_DepthMin))
{$pDepth = 0;}
else
{
if (
($paramfld < dparamData_DepthPrefMin) &&
($paramfld >= dparamData_DepthMin)
)
{
$pDepth = ($paramfld - dparamData_DepthMin) / (dparamData_DepthPrefMin - dparamData_DepthMin);
}
else
{
if ((Integer.parseInt($paramData_Pelagic)!= 0) && (!$vprovider.equals("MM")))
{$pDepth = 1;}
else
{
if (
($paramfld >= dparamData_DepthPrefMin) &&
($paramfld1 <= dparamData_DepthPrefMax)
)
{$pDepth = 1;}
else
{
if (!$paramData_DepthPrefMax.equals(""))
{
if ($paramfld1 >= dparamData_DepthPrefMax)
{
//to correct div by zero
if ((dparamData_DepthMax - dparamData_DepthPrefMax) != 0)
{
$pDepth = (dparamData_DepthMax - $paramfld1) / (dparamData_DepthMax - dparamData_DepthPrefMax);
}
else
{
$pDepth=0;
}
if ($pDepth < 0){$pDepth = 0;}
}
else {$pDepth = 0;}
}
else {$pDepth = 0;}
}
}
}
}
}
}
// print "<br>Depth = ".$pDepth;
//##################################################################################################
// SST
//##################################################################################################
if ($paramData_SSTMin == null)
{ $pSST = 1;} else{
if ($paramData_layer.equals("s")) {$paramfld = $SSTAnMean;}
else if ($paramData_layer.equals("b")) {$paramfld = $SBTAnMean;}
if ($paramfld == -9999 || $paramData_SSTMin.equals("")){$pSST = 1;}
else
{
if ($paramfld < dparamData_SSTMin){$pSST = 0;}
else
{
if ( ($paramfld >= dparamData_SSTMin) &&
$paramfld < dparamData_SSTPrefMin)
{
$pSST = ($paramfld - dparamData_SSTMin) / (dparamData_SSTPrefMin - dparamData_SSTMin);
}
else
{
if (($paramfld >= dparamData_SSTPrefMin)&&
($paramfld <= dparamData_SSTPrefMax)){$pSST = 1;}
else
{
if (($paramfld > dparamData_SSTPrefMax) &&
($paramfld <= dparamData_SSTMax))
{
$pSST = (dparamData_SSTMax - $paramfld) / (dparamData_SSTMax - dparamData_SSTPrefMax);
}
else {$pSST = 0;}
}
}
}
}
}
// print "<br>Temp = ".$pSST;
//##################################################################################################
// Salinity
//##################################################################################################
if ($paramData_SalinityMin == null)
{ $pSalin = 1;} else{
if ($paramData_layer.equals("s")) {$paramfld = $SalinityMean;}
else if ($paramData_layer.equals("b")) {$paramfld = $SalinityBMean;}
if ($paramfld == -9999 || $paramData_SalinityMin.equals("") )
{
//'no data available
$pSalin = 1;
}
else
{
if ($paramfld < dparamData_SalinityMin){$pSalin = 0;}
else
{
if ($paramfld >= dparamData_SalinityMin &&
$paramfld < dparamData_SalinityPrefMin)
{
$pSalin = ($paramfld - dparamData_SalinityMin) / (dparamData_SalinityPrefMin - dparamData_SalinityMin);
}
else
{
if ($paramfld >= dparamData_SalinityPrefMin &&
$paramfld <= dparamData_SalinityPrefMax){$pSalin = 1;}
else
{
if (($paramfld > dparamData_SalinityPrefMax) &&
$paramfld <= dparamData_SalinityMax)
{
$pSalin = (dparamData_SalinityMax - $paramfld) / (dparamData_SalinityMax - dparamData_SalinityPrefMax);
}
else
{$pSalin = 0;}
}
}
}
}
}
// print "<br>Salinity = ".$pSalin;
//##################################################################################################
// Primary Production
//##################################################################################################
if ($paramData_PrimProdMin == null)
{ $pPProd = 1;} else{
//modification of 07 04 11
if ($PrimProdMean == -9999)
{
//Then 'no data available
$pPProd = 1;
}
else
{
if ($PrimProdMean < dparamData_PrimProdMin )
{
$pPProd = 0;
}
else
{
if (($PrimProdMean >= dparamData_PrimProdMin) && ($PrimProdMean < dparamData_PrimProdPrefMin))
{
$pPProd = ($PrimProdMean - dparamData_PrimProdMin) / (dparamData_PrimProdPrefMin - dparamData_PrimProdMin);
}
else
{
if (($PrimProdMean >= dparamData_PrimProdPrefMin) && ($PrimProdMean <= dparamData_PrimProdPrefMax))
{
$pPProd = 1;
}
else
{
if (($PrimProdMean >dparamData_PrimProdPrefMax) && ($PrimProdMean <= dparamData_PrimProdMax))
{
$pPProd = (dparamData_PrimProdMax - $PrimProdMean) / (dparamData_PrimProdMax - dparamData_PrimProdPrefMax);
}
else
{
$pPProd = 0;
}
}
}
}
}
}
// print "<br> Primary Prod = ".$pPProd;
//###################################################################################################################################
// Sea Ice Con
//###################################################################################################################################
if (($paramData_IceConMin == null) || ($paramData_IceConMin.length() == 0)){$pIce = 1;}
else{
//modification of 07 04 11
if (($IceConAnn == null) ||($IceConAnn.length() == 0 )) {$pIce = 1;}
else{
if(!$IceConAnn.equals(""))
{
int $flgIceProbMultiplicationAlgorithm;
if (dIceConAnn < dparamData_IceConMin)
{
$pIce = 0;
$flgIceProbMultiplicationAlgorithm = 0;
}
else if ((dIceConAnn >= dparamData_IceConMin)
&&
(dIceConAnn < dparamData_IceConPrefMin))
{
$pIce = (dIceConAnn - dparamData_IceConMin) / (dparamData_IceConPrefMin - dparamData_IceConMin);
$flgIceProbMultiplicationAlgorithm = 0;
}
else if ( ((dIceConAnn) >= (dparamData_IceConPrefMin))
&&
((dIceConAnn) <= (dparamData_IceConPrefMax)))
{
$pIce = 1;
$flgIceProbMultiplicationAlgorithm = 1;
}
else if ( ((dIceConAnn) > (dparamData_IceConPrefMax))
&&
((dIceConAnn) <= (dparamData_IceConMax)) )
{
$pIce = (((dparamData_IceConMax) - (dIceConAnn))) / (((dparamData_IceConMax) - (dparamData_IceConPrefMax)));
$flgIceProbMultiplicationAlgorithm = 1;
}
else if ((dIceConAnn) > (dparamData_IceConMax))
{
$pIce = 0;
$flgIceProbMultiplicationAlgorithm = 1;
}
}
}
}
// print" pIce = $pIce <br> ";
//###################################################################################################################################
// Distance to Land
//###################################################################################################################################
if (Integer.parseInt($paramData_LandDistYN) == 0) {
$pLand = 1;
}
else {
$pLand = 0;
if ($LandDist == -9999 || $paramData_LandDistMin.equals("") ) {
//no data available
$pLand = 1;
}
else
{
if ($LandDist < (dparamData_LandDistMin)){
$pLand = 0;
}
else
{
if (($LandDist >= (dparamData_LandDistMin)) && ($LandDist < (dparamData_LandDistPrefMin))) {
$pLand = ($LandDist - (dparamData_LandDistMin)) / ((dparamData_LandDistPrefMin) - (dparamData_LandDistMin));
}
else
{
if ((dparamData_LandDistPrefMax) > 1000) {
$pLand = 1;
}
else
{
if (($LandDist >= (dparamData_LandDistPrefMin)) && ($LandDist <= (dparamData_LandDistPrefMax))){$pLand = 1;}
else
{
if (($LandDist > (dparamData_LandDistPrefMax)) && ($LandDist <= (dparamData_LandDistMax))) {
$pLand = ((dparamData_LandDistMax) - $LandDist) / ((dparamData_LandDistMax) - (dparamData_LandDistPrefMax));
}
else {$pLand = 0;}
}
}
}
}
}
}
//print "<br> Distance to Land = ".$pLand;
/*
System.out.println("FACTORS ");
System.out.println("$pSST: "+$pSST);
System.out.println("$pDepth: "+$pDepth);
System.out.println("$pSalin: "+$pSalin);
System.out.println("$pLand: "+$pLand);
System.out.println("$pPProd: "+$pPProd);
System.out.println("$pIce: "+$pIce);
System.out.println("//");
*/
// get output of $pTotal by multiplication
$pTotal = $pSST * $pDepth * $pSalin * $pLand * $pPProd * $pIce;
return $pTotal;
}
}
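Every environmental factor in calcProb applies the same trapezoidal response over its envelope (min, prefMin, prefMax, max): zero outside [min, max], a linear ramp up across [min, prefMin], a plateau of 1 over the preferred range, and a linear ramp down across [prefMax, max]. A compact restatement with a worked value (an illustrative helper, not part of the class):

public static double trapezoid(double x, double min, double prefMin, double prefMax, double max) {
	if (x < min || x > max)
		return 0; //outside the species envelope
	if (x < prefMin)
		return (x - min) / (prefMin - min); //rising ramp
	if (x <= prefMax)
		return 1; //preferred plateau
	return (max - x) / (max - prefMax); //falling ramp
}
//e.g. an SST of 12 against the envelope (10, 14, 20, 24) yields (12-10)/(14-10) = 0.5;
//degenerate envelopes (prefMax == max, etc.) need the same division-by-zero guard
//used in the depth branch above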


@ -0,0 +1,51 @@
package org.gcube.dataanalysis.ecoengine.spatialdistributions;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
public class AquamapsNative extends AquamapsSuitable {
@Override
// overridden for native generation in order to filter on the probability types
public Queue<String> filterProbabilitySet(Queue<String> probabiltyRows) {
ConcurrentLinkedQueue<String> speciesCriteria1 = new ConcurrentLinkedQueue<String>();
ConcurrentLinkedQueue<String> speciesCriteria2 = new ConcurrentLinkedQueue<String>();
ConcurrentLinkedQueue<String> speciesCriteria3 = new ConcurrentLinkedQueue<String>();
int size = probabiltyRows.size();
for (int i = 0; i < size; i++) {
String rowString = probabiltyRows.poll();
if ((rowString != null) && (rowString.length() > 0)) {
String[] probabilityRows = rowString.split(",");
short Inbox = Short.parseShort(probabilityRows[3].replace("'",""));
short InFAO = Short.parseShort(probabilityRows[4].replace("'",""));
if ((Inbox == 1) && (InFAO == 1)) {
speciesCriteria1.offer(rowString);
} else if ((Inbox == 0) && (InFAO == 1)) {
speciesCriteria2.offer(rowString);
} else if ((Inbox == 1) && (InFAO == 0)) {
speciesCriteria3.offer(rowString);
}
}
}
if (speciesCriteria1.size()>0)
return speciesCriteria1;
else if (speciesCriteria2.size()>0)
return speciesCriteria2;
else if (speciesCriteria3.size()>0)
return speciesCriteria3;
else
return new ConcurrentLinkedQueue<String>();
}
@Override
public boolean isSynchronousProbabilityWrite() {
return true;
}
@Override
public String getName() {
return "AQUAMAPS_NATIVE";
}
}
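filterProbabilitySet relies on the row layout produced by the suitable algorithm (see AquamapsSuitable below): after splitting on commas, field 3 is boundboxyn and field 4 is faoareayn. For example (values illustrative):

//columns: speciesid, csquarecode, probability, boundboxyn, faoareayn, faoaream, eezall, lme
String row = "'Fis-22747','1000:102:1','0.92','1','0','27','ESP','23'";
//Inbox=1 and InFAO=0, so this row would land in speciesCriteria3; note that the
//split(",") parse assumes no field value itself contains a comma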


@ -0,0 +1,18 @@
package org.gcube.dataanalysis.ecoengine.spatialdistributions;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.hibernate.SessionFactory;
public class AquamapsNative2050 extends AquamapsNative {
public void init(GenericConfiguration config,SessionFactory dbHibConnection) {
super.init(config, dbHibConnection);
type = "2050";
}
@Override
public String getName() {
return "AQUAMAPS_NATIVE_2050";
}
}


@ -0,0 +1,209 @@
package org.gcube.dataanalysis.ecoengine.spatialdistributions;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Queue;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionTable;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.hibernate.SessionFactory;
public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
String selectAllSpeciesQuery = "select depthmin,meandepth,depthprefmin,pelagic,depthprefmax,depthmax,tempmin,layer,tempprefmin,tempprefmax,tempmax,salinitymin,salinityprefmin,salinityprefmax,salinitymax,primprodmin,primprodprefmin,primprodprefmax,primprodmax,iceconmin,iceconprefmin,iceconprefmax,iceconmax,landdistyn,landdistmin,landdistprefmin,landdistprefmax,landdistmax,nmostlat,smostlat,wmostlong,emostlong,faoareas,speciesid from %1$s;";
String csquareCodeQuery = "select csquarecode,depthmean,depthmax,depthmin, sstanmean,sbtanmean,salinitymean,salinitybmean, primprodmean,iceconann,landdist,oceanarea,centerlat,centerlong,faoaream,eezall,lme from %1$s d where oceanarea>0";
String createTableStatement = "CREATE TABLE %1$s ( speciesid character varying, csquarecode character varying, probability real, boundboxyn smallint, faoareayn smallint, faoaream integer, eezall character varying, lme integer) WITH (OIDS=FALSE ); CREATE INDEX CONCURRENTLY %1$s_idx ON %1$s USING btree (speciesid, csquarecode, faoaream, eezall, lme);";
String destinationTable;
String metainfo ="boundboxyn, faoareayn, faoaream, eezall, lme";
String selectAllSpeciesObservationQuery = "SELECT speciesid,maxclat,minclat from %1$s;";
String hspenMinMaxLat = "maxminlat_hspen";
AquamapsAlgorithmCore core;
protected String currentFAOAreas;
protected HashMap<String,String> currentSpeciesBoundingBoxInfo;
protected HashMap<String, List<Object>> allSpeciesObservations;
//overridden by the 2050 subclasses
protected String type = null;
@Override
public void init(GenericConfiguration config,SessionFactory dbHibConnection) {
selectAllSpeciesQuery = String.format(selectAllSpeciesQuery, config.getEnvelopeTable());
csquareCodeQuery = String.format(csquareCodeQuery, config.getCsquarecodesTable());
createTableStatement = String.format(createTableStatement,config.getDistributionTable());
destinationTable = config.getDistributionTable();
core = new AquamapsAlgorithmCore();
if ((config.getPreprocessedTables()!=null)&&(config.getPreprocessedTables().size()>0))
hspenMinMaxLat = config.getPreprocessedTables().get(0);
AnalysisLogger.getLogger().trace("Aquamaps Algorithm Init(->getting min max latitudes from "+hspenMinMaxLat);
allSpeciesObservations = new HashMap<String, List<Object>>();
List<Object> SpeciesObservations = DatabaseFactory.executeSQLQuery(String.format(selectAllSpeciesObservationQuery, hspenMinMaxLat), dbHibConnection);
int lenObservations = SpeciesObservations.size();
for (int i=0;i<lenObservations;i++){
Object[] maxminArray = (Object[])SpeciesObservations.get(i);
String speciesid = (String)maxminArray[0];
List<Object> maxminInfo = new ArrayList<Object>();
maxminInfo.add(maxminArray);
allSpeciesObservations.put((String)speciesid, maxminInfo);
}
AnalysisLogger.getLogger().trace("Aquamaps Algorithm Init(->init finished");
}
@Override
public String getMainInfoQuery() {
return selectAllSpeciesQuery;
}
@Override
public String getGeographicalInfoQuery() {
return csquareCodeQuery;
}
@Override
public float calcProb(Object species, Object area) {
return (float) core.getSpeciesProb((Object[]) species, (Object[]) area);
}
@Override
public String getAdditionalMetaInformation() {
return metainfo;
}
@Override
public String getAdditionalInformation(Object species, Object area) {
Object[] arearray = (Object[]) area;
HashMap<String,Integer> boundingInfo = calculateBoundingBox(arearray);
String addedInformation = "'"+boundingInfo.get("$InBox")+"','"+boundingInfo.get("$InFAO")+"','"+arearray[14]+"','"+arearray[15]+"','"+arearray[16]+"'";
return addedInformation;
}
@Override
public void postProcess() {
}
@Override
public String getDistributionTableStatement() {
return createTableStatement;
}
@Override
public String getMainInfoID(Object speciesInfo) {
String s = ""+ ((Object[])speciesInfo)[33];
return s;
}
@Override
public String getGeographicalID(Object geoInfo) {
String s = ""+ ((Object[])geoInfo)[0];
return s;
}
public HashMap<String,Integer> calculateBoundingBox(Object[] csquarecode){
HashMap<String,Integer> boundingInfo = core.calculateBoundingBox(
""+csquarecode[0],
currentSpeciesBoundingBoxInfo.get("$pass_NS"),
currentSpeciesBoundingBoxInfo.get("$pass_N"),
currentSpeciesBoundingBoxInfo.get("$pass_S"),
AquamapsAlgorithmCore.getElement(csquarecode,12),//centerlat
AquamapsAlgorithmCore.getElement(csquarecode,13),//centerlong
AquamapsAlgorithmCore.getElement(csquarecode,14),//faoaream
currentSpeciesBoundingBoxInfo.get("$paramData_NMostLat"),
currentSpeciesBoundingBoxInfo.get("$paramData_SMostLat"),
currentSpeciesBoundingBoxInfo.get("$paramData_WMostLong"),
currentSpeciesBoundingBoxInfo.get("$paramData_EMostLong"),
currentFAOAreas,
currentSpeciesBoundingBoxInfo.get("$northern_hemisphere_adjusted"),
currentSpeciesBoundingBoxInfo.get("$southern_hemisphere_adjusted")
);
return boundingInfo;
}
//initializes currentFAOAreas and currentSpeciesBoundingBoxInfo
public void getBoundingBoxInformation(Object[] speciesInfoRow, Object[] speciesObservations){
Object[] row = speciesInfoRow;
String $paramData_NMostLat = AquamapsAlgorithmCore.getElement(row,28);
String $paramData_SMostLat = AquamapsAlgorithmCore.getElement(row,29);
String $paramData_WMostLong = AquamapsAlgorithmCore.getElement(row,30);
String $paramData_EMostLong = AquamapsAlgorithmCore.getElement(row,31);
currentFAOAreas = AquamapsAlgorithmCore.getElement(row,32);
//adjust FAO areas
currentFAOAreas = core.procFAO_2050(currentFAOAreas);
//get Bounding Box Information
// AnalysisLogger.getLogger().trace("TYPE:"+type);
currentSpeciesBoundingBoxInfo = core.getBoundingBoxInfo($paramData_NMostLat, $paramData_SMostLat, $paramData_WMostLong, $paramData_EMostLong, speciesObservations,type);
//end of get BoundingBoxInformation
}
@Override
public void singleStepPreprocess(Object species, Object allAreasInformation) {
List<Object> speciesObservations = allSpeciesObservations.get(getMainInfoID(species));
if( ((speciesObservations==null)||speciesObservations.size()==0)){
Object[] defaultmaxmin = {"90","-90"};
speciesObservations = new ArrayList<Object>();
speciesObservations.add(defaultmaxmin);
}
getBoundingBoxInformation((Object[])species,(Object[])speciesObservations.get(0));
}
@Override
public void singleStepPostprocess(Object species, Object area) {
}
@Override
public boolean isSynchronousProbabilityWrite() {
return false;
}
@Override
public String filterProbabiltyRow(String probabiltyRow) {
return probabiltyRow;
}
@Override
//overridden for native generation in order to filter on the probability types
public Queue<String> filterProbabilitySet(Queue<String> probabiltyRows) {
return probabiltyRows;
}
@Override
public float getInternalStatus() {
return 100;
}
@Override
public ALG_PROPS[] getProperties() {
ALG_PROPS [] p = {ALG_PROPS.SPECIES_VS_CSQUARE_FROM_DATABASE, ALG_PROPS.SPECIES_VS_CSQUARE_REMOTE_FROM_DATABASE};
return p;
}
@Override
public String getName() {
return "AQUAMAPS_SUITABLE";
}
@Override
public String getDescription() {
return "Algorithm by Aquamaps";
}
}
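The five values returned by getAdditionalInformation line up, in order, with the metainfo columns declared in createTableStatement (boundboxyn, faoareayn, faoaream, eezall, lme), so a complete buffered row matches the table's column order. Sample alignment (values illustrative):

//speciesid     csquarecode    probability | boundboxyn faoareayn faoaream eezall lme
//'Fis-22747',  '1000:102:1',  '0.92',     | '1',       '1',      '27',    'ESP', '23'
//the first three fields come from the generator loop; the trailing five are the
//addedInformation string built above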


@ -0,0 +1,18 @@
package org.gcube.dataanalysis.ecoengine.spatialdistributions;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.hibernate.SessionFactory;
public class AquamapsSuitable2050 extends AquamapsSuitable {
public void init(GenericConfiguration config,SessionFactory dbHibConnection) {
super.init(config, dbHibConnection);
type = "2050";
}
@Override
public String getName() {
return "AQUAMAPS_SUITABLE_2050";
}
}


@ -0,0 +1,190 @@
package org.gcube.dataanalysis.ecoengine.spatialdistributions;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
public class DummyAlgorithm implements SpatialProbabilityDistributionGeneric{
List<String> randomElements;
String persistence;
static String persistedFilePrefix = "dummyfile";
public static void main (String[] args){
String s = toString(330.6499f);
System.out.println(s);
System.out.println(fromString(s));
}
//encodes the decimal string of a float as letters: digit d -> (char)(d+65), '.' -> 'o'
private static String toString(float number){
String s = ""+number;
int m = s.length();
String res = "";
for (int i=0;i<m;i++){
int k = 0;
if (s.charAt(i)=='.')
k = (int)s.charAt(i);
else
k = Integer.parseInt(""+s.charAt(i));
res+=(char) (k+65);
}
return res;
}
//inverse mapping: letters back to the decimal string, then parse
private static float fromString(String alphanumeric){
int m = alphanumeric.length();
String res = "";
for (int i=0;i<m;i++){
int k = (int)alphanumeric.charAt(i) - 65;
if (k == (int)'.')
res+=".";
else
res+= k;
}
return Float.parseFloat(res);
}
@Override
public void init(GenericConfiguration config) {
AnalysisLogger.getLogger().trace("Dummy INIT");
randomElements = new ArrayList<String>();
for (int i=0;i<170000;i++)
{
randomElements.add(""+(100*Math.random()));
}
persistence = config.getPersistencePath();
}
@Override
public String getMainInfoType() {
return String.class.getName();
}
@Override
public String getGeographicalInfoType() {
return String.class.getName();
}
@Override
public List<Object> getMainInfoObjects() {
List<Object> randomElements = new ArrayList<Object>();
for (int i=0;i<20;i++)
{
randomElements.add(toString((float)(100f*Math.random())));
}
return randomElements;
}
@Override
public List<Object> getGeographicalInfoObjects() {
AnalysisLogger.getLogger().trace("Dummy TAKING RANDOMS");
List<Object> randomElements = new ArrayList<Object>();
for (int i=0;i<170000;i++)
{
randomElements.add(""+(100*Math.random()));
}
return randomElements;
}
@Override
public float calcProb(Object mainInfo, Object area) {
// AnalysisLogger.getLogger().debug("Calculation Probability");
Float f1 = fromString((String) mainInfo);
Float f2 = Float.valueOf((String) area);
return (float) 100f*f1*f2;
}
@Override
public void singleStepPreprocess(Object mainInfo, Object area) {
AnalysisLogger.getLogger().trace("Dummy SINGLE PREPROCESSING Step");
}
@Override
public void singleStepPostprocess(Object mainInfo, Object allAreasInformation) {
AnalysisLogger.getLogger().trace("Dummy SINGLE POSTPROCESSING Step");
}
@Override
public void postProcess() {
AnalysisLogger.getLogger().trace("Dummy POSTPROCESS");
}
@Override
public void storeDistribution(Map<Object, Map<Object, Float>> distribution) {
ObjectOutputStream outputStream = null;
try {
int ysize = 0;
for (Object s:distribution.keySet()){
ysize = distribution.get(s).size();
break;
}
AnalysisLogger.getLogger().debug("Dummy overall dimension of the distribution: "+distribution.size()+" X "+ysize);
//construct the output stream for the serialized distribution
String filename = persistence + persistedFilePrefix + "_" + UUID.randomUUID();
AnalysisLogger.getLogger().debug("Dummy Storing in " + filename);
outputStream = new ObjectOutputStream(new FileOutputStream(filename));
outputStream.writeObject(distribution);
AnalysisLogger.getLogger().debug("Dummy Stored");
} catch (Exception ex) {
ex.printStackTrace();
} finally {
try {
if (outputStream != null) {
outputStream.flush();
outputStream.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
@Override
public float getInternalStatus() {
return 100f;
}
@Override
public String getMainInfoID(Object mainInfo) {
return (String)mainInfo;
}
@Override
public String getGeographicalID(Object geoInfo) {
return (String)geoInfo;
}
@Override
public ALG_PROPS[] getProperties() {
ALG_PROPS [] p = {ALG_PROPS.PHENOMENON_VS_GEOINFO};
return p;
}
@Override
public String getName() {
return "DUMMY";
}
@Override
public String getDescription() {
// TODO Auto-generated method stub
return null;
}
}


@ -0,0 +1,138 @@
package org.gcube.dataanalysis.ecoengine.spatialdistributions;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.SpatialProbabilityDistributionGeneric;
public class TestAlgorithm implements SpatialProbabilityDistributionGeneric{
@Override
public ALG_PROPS[] getProperties() {
ALG_PROPS[] p = {ALG_PROPS.PHENOMENON_VS_GEOINFO};
return p;
}
String pers;
@Override
public void init(GenericConfiguration config) {
pers = config.getPersistencePath();
}
@Override
public String getMainInfoType() {
return String.class.getName();
}
@Override
public String getGeographicalInfoType() {
return String.class.getName();
}
@Override
public List<Object> getMainInfoObjects() {
List<Object> o = new ArrayList<Object>();
o.add("pheno1");
o.add("pheno2");
return o;
}
@Override
public List<Object> getGeographicalInfoObjects() {
List<Object> o = new ArrayList<Object>();
o.add("geo1");
o.add("geo2");
return o;
}
@Override
public float calcProb(Object mainInfo, Object area) {
String phen = (String) mainInfo;
String geo = (String) area;
System.out.println(phen+" vs "+geo);
return 1;
}
@Override
public void singleStepPreprocess(Object mainInfo, Object area) {
}
@Override
public void singleStepPostprocess(Object mainInfo, Object allAreasInformation) {
}
@Override
public void postProcess() {
}
@Override
public void storeDistribution(Map<Object, Map<Object, Float>> distribution) {
ObjectOutputStream outputStream = null;
try {
int ysize = 0;
for (Object s:distribution.keySet()){
ysize = distribution.get(s).size();
break;
}
AnalysisLogger.getLogger().debug("overall dimension of the distribution: "+distribution.size()+" X "+ysize);
//construct the output stream for the serialized distribution
String filename = pers+"testProb"+UUID.randomUUID();
AnalysisLogger.getLogger().debug(" Storing in "+filename);
outputStream = new ObjectOutputStream(new FileOutputStream(filename));
outputStream.writeObject(distribution);
AnalysisLogger.getLogger().debug("Stored");
} catch (Exception ex) {
ex.printStackTrace();
} finally {
try {
if (outputStream != null) {
outputStream.flush();
outputStream.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
@Override
public float getInternalStatus() {
return 100;
}
@Override
public String getMainInfoID(Object mainInfo) {
return (String) mainInfo;
}
@Override
public String getGeographicalID(Object geoInfo) {
return (String) geoInfo;
}
@Override
public String getName() {
return "TEST";
}
@Override
public String getDescription() {
return null;
}
}

View File

@ -0,0 +1,164 @@
package org.gcube.dataanalysis.ecoengine.test;
import java.util.HashMap;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class RegressionTestsGeneration {
/**
* Example of parallel processing on a single machine: the procedure generates a new table containing a suitable-species distribution.
*
*/
public static void main(String[] args) throws Exception {
// System.out.println("TEST 1");
// Generator generator = GeneratorsFactory.getGenerator(testConfigRemote());
// generate(generator);
System.out.println("TEST 2");
Generator generator = GeneratorsFactory.getGenerator(testConfigSuitable());
generate(generator);
/*
System.out.println("TEST 3");
generator = GeneratorsFactory.getGenerator(testConfigNative());
generate(generator);
System.out.println("TEST 4");
generator = GeneratorsFactory.getGenerator(testConfigSuitable2050());
generate(generator);
System.out.println("TEST 5");
generator = GeneratorsFactory.getGenerator(testConfigNative2050());
generate(generator);
*/
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
RegressionTestsGeneration tgs = new RegressionTestsGeneration();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.generate();
} catch (Exception e) {
e.printStackTrace();
}
}
}
private static GenericConfiguration testConfigRemote() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_remote_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
config.setCreateTable(true);
config.setNumberOfResources(20);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://146.48.87.169/testdb");
config.setModel("REMOTE_AQUAMAPS_SUITABLE");
config.setRemoteCalculator("http://node1.d.venusc.research-infrastructures.eu:5942/api/");
config.setServiceUserName("gianpaolo.coro");
config.setRemoteEnvironment("windows azure");
HashMap<String, String> properties = new HashMap<String, String>();
properties.put("property1", "value1");
properties.put("property2", "value2");
config.setGeneralProperties(properties);
return config;
}
private static GenericConfiguration testConfigSuitable() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_SUITABLE");
config.setGenerator("LOCAL_WITH_DATABASE");
return config;
}
private static GenericConfiguration testConfigNative() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_native_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_NATIVE");
return config;
}
private static GenericConfiguration testConfigNative2050() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_native_2050_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_NATIVE_2050");
return config;
}
private static GenericConfiguration testConfigSuitable2050() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_2050_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_SUITABLE_2050");
return config;
}
}
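The status-polling loop above recurs verbatim in each regression test of this commit; a sketch of how it could be factored into a shared helper, using only the Generator methods already exercised (getStatus, getResourceLoad, getResources, getLoad). GeneratorMonitor is a hypothetical utility, not part of this commit:

import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
public class GeneratorMonitor {
// polls the generator once per second and prints its load until it reports 100%
public static void waitForCompletion(Generator generator) throws InterruptedException {
while (generator.getStatus() < 100) {
System.out.println("LOAD: " + generator.getResourceLoad());
System.out.println("RESOURCES: " + generator.getResources());
System.out.println("SPECIES: " + generator.getLoad());
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
}
}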

View File

@ -0,0 +1,174 @@
package org.gcube.dataanalysis.ecoengine.test;
import java.util.HashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class RegressionTestsGenerationList {
/**
* Example of parallel processing on a single machine: the procedure generates a new table containing a distribution of suitable species.
*
*/
public static void main(String[] args) throws Exception {
// System.out.println("TEST 1");
// Generator generator = GeneratorsFactory.getGenerator(testConfigRemote());
// generate(generator);
System.out.println("TEST 2");
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigSuitable());
Generator generator = generators.get(0);
generator.init();
generate(generator);
/*
System.out.println("TEST 3");
generator = GeneratorsFactory.getGenerator(testConfigNative());
generate(generator);
System.out.println("TEST 4");
generator = GeneratorsFactory.getGenerator(testConfigSuitable2050());
generate(generator);
System.out.println("TEST 5");
generator = GeneratorsFactory.getGenerator(testConfigNative2050());
generate(generator);
*/
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
RegressionTestsGenerationList tgs = new RegressionTestsGenerationList();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.generate();
} catch (Exception e) {
e.printStackTrace();
}
}
}
private static GenericConfiguration testConfigRemote() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_remote_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
config.setCreateTable(true);
config.setNumberOfResources(20);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://146.48.87.169/testdb");
config.setModel("REMOTE_AQUAMAPS_SUITABLE");
config.setRemoteCalculator("http://node1.d.venusc.research-infrastructures.eu:5942/api/");
config.setServiceUserName("gianpaolo.coro");
config.setRemoteEnvironment("windows azure");
HashMap<String, String> properties = new HashMap<String, String>();
properties.put("property1", "value1");
properties.put("property2", "value2");
config.setGeneralProperties(properties);
return config;
}
private static GenericConfiguration testConfigSuitable() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_micro");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://146.48.87.169/testdb");
config.setRemoteCalculator("http://node1.d.venusc.research-infrastructures.eu:5942/api/");
config.setServiceUserName("gianpaolo.coro");
config.setRemoteEnvironment("windows azure");
HashMap<String, String> properties = new HashMap<String, String>();
properties.put("property1", "value1");
properties.put("property2", "value2");
config.setModel("AQUAMAPS_SUITABLE");
return config;
}
private static GenericConfiguration testConfigNative() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_native_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_NATIVE");
return config;
}
private static GenericConfiguration testConfigNative2050() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_native_2050_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_NATIVE_2050");
return config;
}
private static GenericConfiguration testConfigSuitable2050() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_2050_test");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_SUITABLE_2050");
return config;
}
}

View File

@ -0,0 +1,93 @@
package org.gcube.dataanalysis.ecoengine.test;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Modeler;
import org.gcube.dataanalysis.ecoengine.processing.factories.ModelersFactory;
public class RegressionTestsModeling {
/**
* Example of modeling on a single machine: the procedure recomputes the HSPEN envelope table.
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
GenericConfiguration modelconfig = new GenericConfiguration();
modelconfig.setConfigPath("./cfg/");
modelconfig.setModel("HSPEN");
Modeler modeler = ModelersFactory.getGenerator(modelconfig);
Object input = testInputConfig();
Object output = testOutputConfig();
// modeler.model(input, null, output);
generate(modeler, input, output);
}
private static void generate(Modeler modeler, Object input , Object output) throws Exception {
if (modeler != null) {
RegressionTestsModeling tgs = new RegressionTestsModeling();
ThreadCalculator tc = tgs.new ThreadCalculator(modeler,input,output);
Thread t = new Thread(tc);
t.start();
while (modeler.getStatus() < 100) {
String resLoad = modeler.getResourceLoad();
String ress = modeler.getResources();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Modeling Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Modeler mg;
Object input;
Object output;
public ThreadCalculator(Modeler modeler, Object input ,Object output) {
this.mg = modeler;
this.input=input;
this.output=output;
}
public void run() {
try {
mg.model(input,null, output);
} catch (Exception e) {
e.printStackTrace();
}
}
}
private static GenericConfiguration testInputConfig() {
GenericConfiguration config = new GenericConfiguration();
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
config.setOccurrenceCellsTable("occurrencecells");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
return config;
}
private static GenericConfiguration testOutputConfig() {
GenericConfiguration config = new GenericConfiguration();
config.setEnvelopeTable("hspen_validation_refactored");
return config;
}
}

View File

@ -0,0 +1,74 @@
package org.gcube.dataanalysis.ecoengine.test;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class TestsDummyGeneration {
/**
* Example of parallel processing on a single machine: the procedure runs the DUMMY distribution algorithm.
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
Generator generator = GeneratorsFactory.getGenerator(testConfigSuitable());
generate(generator);
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
TestsDummyGeneration tgs = new TestsDummyGeneration();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.generate();
} catch (Exception e) {
e.printStackTrace();
}
}
}
private static GenericConfiguration testConfigSuitable() {
GenericConfiguration config = new GenericConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(2);
config.setModel("DUMMY");
return config;
}
}

View File

@ -0,0 +1,77 @@
package org.gcube.dataanalysis.ecoengine.test;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class TestsTESTGeneration {
/**
* Example of parallel processing on a single machine: the procedure runs the TEST distribution algorithm and stores its output under the persistence path.
*
*/
public static void main(String[] args) throws Exception {
System.out.println("TEST 1");
List<Generator> generators = GeneratorsFactory.getGenerators(testConfigSuitable());
generators.get(0).init();
generate(generators.get(0));
generators = null;
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
TestsTESTGeneration tgs = new TestsTESTGeneration();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.generate();
} catch (Exception e) {
e.printStackTrace();
}
}
}
private static GenericConfiguration testConfigSuitable() {
GenericConfiguration config = new GenericConfiguration();
config.setConfigPath("./cfg/");
config.setPersistencePath("./");
config.setNumberOfResources(2);
config.setModel("TEST");
return config;
}
}

View File

@ -0,0 +1,83 @@
package org.gcube.dataanalysis.ecoengine.test.generations;
import java.util.HashMap;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.Generator;
import org.gcube.dataanalysis.ecoengine.processing.factories.GeneratorsFactory;
public class GenerationHSPECValidation {
/**
* Example of parallel processing on a single machine: the procedure generates a new table containing a distribution of suitable species.
*
*/
public static void main(String[] args) throws Exception {
Generator generator = GeneratorsFactory.getGenerator(testConfigSuitable());
generate(generator);
}
private static void generate(Generator generator) throws Exception {
if (generator != null) {
GenerationHSPECValidation tgs = new GenerationHSPECValidation();
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus() < 100) {
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: " + resLoad);
System.out.println("RESOURCES: " + ress);
System.out.println("SPECIES: " + species);
System.out.println("STATUS: " + generator.getStatus());
Thread.sleep(1000);
}
} else
AnalysisLogger.getLogger().trace("Generator Algorithm Not Supported");
}
public class ThreadCalculator implements Runnable {
Generator dg;
public ThreadCalculator(Generator dg) {
this.dg = dg;
}
public void run() {
try {
dg.generate();
} catch (Exception e) {
e.printStackTrace();
}
}
}
private static GenericConfiguration testConfigSuitable() {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_validation_automatic");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_SUITABLE");
return config;
}
}

View File

@ -0,0 +1,69 @@
package org.gcube.dataanalysis.ecoengine.test.regressions1;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator;
public class RegressionTestLocalGenerationStatus {
/**
* Example of parallel processing on a single machine:
* the procedure generates a new table containing a distribution of suitable species.
*
*/
public static void main(String[] args) throws Exception{
RegressionTestLocalGenerationStatus tgs = new RegressionTestLocalGenerationStatus();
GenericConfiguration config = new GenericConfiguration();
//path to the cfg directory containing default parameters
config.setDistributionTable("hspec_suitable_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
// config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_SUITABLE");
LocalSplitGenerator generator = new LocalSplitGenerator(config);
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus()<100){
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: "+resLoad);
System.out.println("RESOURCES: "+ress);
System.out.println("SPECIES: "+species);
System.out.println("STATUS: "+generator.getStatus());
Thread.sleep(30000);
}
}
public class ThreadCalculator implements Runnable {
LocalSplitGenerator dg ;
public ThreadCalculator(LocalSplitGenerator dg) {
this.dg = dg;
}
public void run() {
try{
dg.generate();
}catch(Exception e){e.printStackTrace();}
}
}
}

View File

@ -0,0 +1,69 @@
package org.gcube.dataanalysis.ecoengine.test.regressions1;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator;
public class RegressionTestNative2050LocalGenerationStatus {
/**
* Example of parallel processing on a single machine:
* the procedure generates a new table containing a distribution of native species (2050 scenario).
*
*/
public static void main(String[] args) throws Exception{
RegressionTestNative2050LocalGenerationStatus tgs = new RegressionTestNative2050LocalGenerationStatus();
GenericConfiguration config = new GenericConfiguration();
//path to the cfg directory containing default parameters
config.setDistributionTable("hspec_native_2050_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
// config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_NATIVE_2050");
LocalSplitGenerator generator = new LocalSplitGenerator(config);
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus()<100){
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: "+resLoad);
System.out.println("RESOURCES: "+ress);
System.out.println("SPECIES: "+species);
System.out.println("STATUS: "+generator.getStatus());
Thread.sleep(30000);
}
}
public class ThreadCalculator implements Runnable {
LocalSplitGenerator dg ;
public ThreadCalculator(LocalSplitGenerator dg) {
this.dg = dg;
}
public void run() {
try{
dg.generate();
}catch(Exception e){e.printStackTrace();}
}
}
}

View File

@ -0,0 +1,69 @@
package org.gcube.dataanalysis.ecoengine.test.regressions1;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator;
public class RegressionTestNativeLocalGenerationStatus {
/**
* Example of parallel processing on a single machine:
* the procedure generates a new table containing a distribution of native species.
*
*/
public static void main(String[] args) throws Exception{
RegressionTestNativeLocalGenerationStatus tgs = new RegressionTestNativeLocalGenerationStatus();
GenericConfiguration config = new GenericConfiguration();
//path to the cfg directory containing default parameters
config.setDistributionTable("hspec_native_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
// config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_NATIVE");
LocalSplitGenerator generator = new LocalSplitGenerator(config);
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus()<100){
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: "+resLoad);
System.out.println("RESOURCES: "+ress);
System.out.println("SPECIES: "+species);
System.out.println("STATUS: "+generator.getStatus());
Thread.sleep(30000);
}
}
public class ThreadCalculator implements Runnable {
LocalSplitGenerator dg ;
public ThreadCalculator(LocalSplitGenerator dg) {
this.dg = dg;
}
public void run() {
try{
dg.generate();
}catch(Exception e){e.printStackTrace();}
}
}
}

View File

@ -0,0 +1,83 @@
package org.gcube.dataanalysis.ecoengine.test.regressions1;
import java.util.HashMap;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator;
public class RegressionTestRemoteGenerationStatus {
/**
* Example of remote processing:
* the procedure generates a new table containing a distribution of suitable species.
*
*/
public static void main(String[] args) throws Exception{
RegressionTestRemoteGenerationStatus tgs = new RegressionTestRemoteGenerationStatus();
GenericConfiguration config = new GenericConfiguration();
//path to the cfg directory containing default parameters
config.setDistributionTable("hspec_suitable_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
// config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(20);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://146.48.87.169/testdb");
config.setModel("REMOTE_AQUAMAPS_SUITABLE");
config.setRemoteCalculator("http://node1.d.venusc.research-infrastructures.eu:5942/api/");
config.setServiceUserName("gianpaolo.coro");
//new parameters
config.setRemoteEnvironment("windows azure");
HashMap<String,String> properties = new HashMap<String, String>();
properties.put("property1", "value1");
properties.put("property2", "value2");
config.setGeneralProperties(properties);
RainyCloudGenerator generator = new RainyCloudGenerator(config);
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus()<100){
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: "+resLoad);
System.out.println("RESOURCES: "+ress);
System.out.println("SPECIES: "+species);
System.out.println("STATUS: "+generator.getStatus());
Thread.sleep(1000);
}
}
public class ThreadCalculator implements Runnable {
RainyCloudGenerator dg ;
public ThreadCalculator(RainyCloudGenerator dg) {
this.dg = dg;
}
public void run() {
try{
dg.generate();
}catch(Exception e){e.printStackTrace();}
}
}
}

View File

@ -0,0 +1,69 @@
package org.gcube.dataanalysis.ecoengine.test.regressions1;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator;
public class RegressionTestSuitable2050LocalGenerationStatus {
/**
* Example of parallel processing on a single machine:
* the procedure generates a new table containing a distribution of suitable species (2050 scenario).
*
*/
public static void main(String[] args) throws Exception{
RegressionTestSuitable2050LocalGenerationStatus tgs = new RegressionTestSuitable2050LocalGenerationStatus();
GenericConfiguration config = new GenericConfiguration();
//path to the cfg directory containing default parameters
config.setDistributionTable("hspec_suitable_2050_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_validation");
// config.setEnvelopeTable("hspen_mini");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
config.setModel("AQUAMAPS_SUITABLE_2050");
LocalSplitGenerator generator = new LocalSplitGenerator(config);
ThreadCalculator tc = tgs.new ThreadCalculator(generator);
Thread t = new Thread(tc);
t.start();
while (generator.getStatus()<100){
String resLoad = generator.getResourceLoad();
String ress = generator.getResources();
String species = generator.getLoad();
System.out.println("LOAD: "+resLoad);
System.out.println("RESOURCES: "+ress);
System.out.println("SPECIES: "+species);
System.out.println("STATUS: "+generator.getStatus());
Thread.sleep(30000);
}
}
public class ThreadCalculator implements Runnable {
LocalSplitGenerator dg ;
public ThreadCalculator(LocalSplitGenerator dg) {
this.dg = dg;
}
public void run() {
try{
dg.generate();
}catch(Exception e){e.printStackTrace();}
}
}
}

View File

@ -0,0 +1,43 @@
package org.gcube.dataanalysis.ecoengine.test.tables;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.HpecDiscrepanciesCalculator;
public class CompareHspecSingle {
public static void main(String[] args) throws Exception{
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_micro");
// config.setEnvelopeTable("hspen");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
HpecDiscrepanciesCalculator ec = new HpecDiscrepanciesCalculator(config);
ec.referenceTable = "hspec_validation where probability>0.19";
ec.analyzedTable = "hspec_suitable_automatic_local";
ec.referenceCriteria = "speciesid,csquarecode";
ec.destinationCriteria = "speciesid,csquarecode";
ec.referenceSelectedColumns = "speciesid,csquarecode,probability";
ec.destinationSelectedColumns = "speciesid,csquarecode,probability";
long t0 = System.currentTimeMillis();
ec.runTest();
long t1 = System.currentTimeMillis();
float difference = (t1-t0);
difference = difference /(float)(1000*60);
System.out.println("Elapsed time : "+difference+" min");
}
}

View File

@ -0,0 +1,45 @@
package org.gcube.dataanalysis.ecoengine.test.tables;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.TablesDiscrepanciesCalculator;
public class CompareTables {
public static void main(String[] args) throws Exception{
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_micro");
// config.setEnvelopeTable("hspen");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
TablesDiscrepanciesCalculator ec = new TablesDiscrepanciesCalculator(config);
ec.referenceTable = "hspec_suitable_automatic_local";
ec.analyzedTable = "hspec_suitable_automatic_local2";
ec.referenceCriteria = "speciesid,csquarecode";
ec.destinationCriteria = "speciesid,csquarecode";
ec.referenceSelectedColumns = "speciesid,csquarecode,probability";
ec.destinationSelectedColumns = "speciesid,csquarecode,probability";
long t0 = System.currentTimeMillis();
ec.runTest();
long t1 = System.currentTimeMillis();
float difference = (t1-t0);
difference = difference /(float)(1000*60);
System.out.println("Elapsed time : "+difference+" min");
System.out.print("ciaaooooo");
}
}

View File

@ -0,0 +1,279 @@
package org.gcube.dataanalysis.ecoengine.utils;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.util.Iterator;
import java.util.List;
import javax.xml.parsers.DocumentBuilderFactory;
import org.dom4j.Document;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.hibernate.jdbc.Work;
public class DatabaseFactory{
public static SessionFactory initDBConnection(String configurationFile) throws Exception {
String xml = FileTools.readXMLDoc(configurationFile);
SessionFactory DBSessionFactory = null;
Configuration cfg = new Configuration();
cfg = cfg.configure(DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new ByteArrayInputStream(xml.getBytes())));
DBSessionFactory = cfg.buildSessionFactory();
return DBSessionFactory;
}
@SuppressWarnings({"unchecked"})
public static SessionFactory initDBConnection(String configurationFile, LexicalEngineConfiguration config) throws Exception {
if (config==null)
return initDBConnection(configurationFile);
// take the configuration file
File fl = new File(configurationFile);
FileInputStream stream = new FileInputStream(fl);
SAXReader saxReader = new SAXReader();
Document document = saxReader.read(stream);
List<Node> nodes = document.selectNodes("//hibernate-configuration/session-factory/property");
Iterator<Node> nodesIterator = nodes.iterator();
while (nodesIterator.hasNext()) {
Node currentnode = nodesIterator.next();
String element = currentnode.valueOf("@name");
if (element.equals("connection.driver_class"))
if (config.getDatabaseDriver() != null){
currentnode.setText(config.getDatabaseDriver());
}
if (element.equals("connection.url")) {
if (config.getDatabaseURL() != null)
currentnode.setText(config.getDatabaseURL());
}
if (element.equals("connection.username")) {
if (config.getDatabaseUserName() != null)
currentnode.setText(config.getDatabaseUserName());
}
if (element.equals("connection.password")) {
if (config.getDatabasePassword() != null)
currentnode.setText(config.getDatabasePassword());
}
if (element.equals("dialect")) {
if (config.getDatabaseDialect() != null)
currentnode.setText(config.getDatabaseDialect());
}
if (element.equals("c3p0.idleConnectionTestPeriod")) {
if (config.getDatabaseIdleConnectionTestPeriod() != null)
currentnode.setText(config.getDatabaseIdleConnectionTestPeriod());
}
if (element.equals("c3p0.automaticTestTable")) {
if (config.getDatabaseAutomaticTestTable() != null)
currentnode.setText(config.getDatabaseAutomaticTestTable());
}
}
Configuration cfg = new Configuration();
cfg = cfg.configure(DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new ByteArrayInputStream(document.asXML().getBytes())));
cfg.setProperty("hibernate.hbm2ddl.auto", "create");
SessionFactory DBSessionFactory = null;
DBSessionFactory = cfg.buildSessionFactory();
// close stream
stream.close();
return DBSessionFactory;
}
@SuppressWarnings({"unchecked"})
public static List<Object> executeHQLQuery(String query, SessionFactory DBSessionFactory, boolean useSQL) {
List<Object> obj = null;
Session ss = null;
ss = DBSessionFactory.getCurrentSession();
ss.beginTransaction();
Query qr = null;
if (useSQL)
qr = ss.createSQLQuery(query);
else
qr = ss.createQuery(query);
List<Object> result = qr.list();
ss.getTransaction().commit();
if (result == null)
System.out.println("Hibernate doesn't return a valid object when org.gcube.contentmanagement.lexicalmatcher retrieve UserState Object");
// if (result != null && result.size() == 0)
// System.out.println(String.format("found nothing in database for query: "+query));
if (result != null && result.size() != 0) {
obj = result;
}
rollback(ss);
return obj;
}
public static void executeUpdateNoTransaction(final String query, String drivername,String username, String password, String databaseurl, boolean useSQL) throws Exception{
// Load the database driver
Class.forName(drivername) ;
// Get a connection to the database
Connection conn = DriverManager.getConnection(databaseurl,username,password) ;
// Get a statement from the connection
Statement stmt = conn.createStatement() ;
// Execute the query
stmt.executeUpdate( query) ;
stmt.close() ;
conn.close() ;
}
public static void executeUpdateNoTransaction(final String query, SessionFactory DBSessionFactory, boolean useSQL) throws Exception{
// System.out.println("executing query: " + query);
Session ss = null;
try {
/*
ss = DBSessionFactory.getCurrentSession();
// System.out.println("executing query");
ss.doWork(new Work() {
@Override
public void execute(Connection conn) throws SQLException {
Statement stmt = conn.createStatement() ;
// Execute the query
ResultSet rs = stmt.executeQuery(query) ;
}
});
*/
} catch (Exception e) {
throw e;
}
}
public static void executeHQLUpdate(String query, SessionFactory DBSessionFactory, boolean useSQL) throws Exception{
// System.out.println("executing query: " + query);
Session ss = null;
try {
ss = DBSessionFactory.getCurrentSession();
// System.out.println("executing query");
ss.beginTransaction();
Query qr = null;
if (useSQL)
qr = ss.createSQLQuery(query);
else
qr = ss.createQuery(query);
qr.executeUpdate();
ss.getTransaction().commit();
} catch (Exception e) {
rollback(ss);
// e.printStackTrace();
throw e;
}
}
public static void executeNativeUpdate(String query, SessionFactory DBSessionFactory) {
// System.out.println("executing query: " + query);
Session ss = null;
try {
ss = DBSessionFactory.getCurrentSession();
System.out.println("executing query");
ss.beginTransaction();
Query qr = null;
qr = DBSessionFactory.getCurrentSession().getNamedQuery("mySp").setParameter("param", query);
qr.executeUpdate();
ss.getTransaction().commit();
} catch (Exception e) {
rollback(ss);
e.printStackTrace();
}
}
public static void executeSQLUpdate(String query, SessionFactory DBSessionFactory) throws Exception {
executeHQLUpdate(query, DBSessionFactory, true);
}
public static List<Object> executeSQLQuery(String query, SessionFactory DBSessionFactory) {
// System.out.println("QUERY: "+query);
return executeHQLQuery(query, DBSessionFactory, true);
}
public static void rollback(Session ss) {
try {
if (ss != null && ss.getTransaction() != null)
ss.getTransaction().rollback();
} catch (Exception ex) {
// ignored: rollback is best-effort
} finally {
try {
ss.close();
} catch (Exception ee) {
// ignored: session close is best-effort
}
}
}
public static void saveObject(Object obj, SessionFactory DBSessionFactory) throws Exception {
if (DBSessionFactory != null) {
Session ss = null;
try {
ss = DBSessionFactory.getCurrentSession();
ss.beginTransaction();
ss.saveOrUpdate(obj);
ss.getTransaction().commit();
} catch (Exception e) {
rollback(ss);
throw e;
}
}
}
}
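A minimal usage sketch of DatabaseFactory, mirroring the initialization pattern of the discrepancy calculators below (the credentials and the hcaf_d table are the ones appearing elsewhere in this commit; adjust to your environment). DatabaseFactoryExample is hypothetical, not part of this commit:

import java.math.BigInteger;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.hibernate.SessionFactory;
public class DatabaseFactoryExample {
public static void main(String[] args) throws Exception {
GenericConfiguration config = new GenericConfiguration();
config.setConfigPath("./cfg/");
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
// build a session factory from the Hibernate XML configuration, overriding connection parameters
SessionFactory sf = DatabaseFactory.initDBConnection(config.getConfigPath() + GenericConfiguration.defaultConnectionFile, config);
try {
// executeSQLQuery returns null when the query yields no rows
List<Object> rows = DatabaseFactory.executeSQLQuery("select count(*) from hcaf_d", sf);
System.out.println("hcaf_d rows: " + (BigInteger) rows.get(0));
} finally {
sf.close();
}
}
}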

View File

@ -0,0 +1,214 @@
package org.gcube.dataanalysis.ecoengine.utils;
import java.math.BigInteger;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.hibernate.SessionFactory;
/**
* Checks whether two tables are equal.
* Numbers are compared with a tolerance of 0.01 (second decimal position).
*/
public class HpecDiscrepanciesCalculator {
private BigInteger numOfElements;
private int errorCounter;
//connection setup
protected String LogFile = "ALog.properties";
//fundamental: the size of each chunk of rows compared at a time
int chunkSize = 7000;
static double Threshold = 0.01;
//change this defaults to change comparison
public String referenceTable = "speciesrichness";
public String analyzedTable = "crossspecies_nonreviewed";
public String referenceCriteria = "csquarecode,speccount";
public String destinationCriteria = "csquarecode,maxspeciescountinacell";
public String referenceSelectedColumns = "csquarecode,speccount";
public String destinationSelectedColumns = "csquarecode,maxspeciescountinacell";
//selection query
public static String selectElementsQuery = "select %1$s from %2$s order by %3$s";
public static String selectDestElementsQuery = "select %1$s from %2$s where %3$s";
//database connections
protected SessionFactory referencedbConnection;
protected SessionFactory destinationdbConnection;
//init connections
public HpecDiscrepanciesCalculator(GenericConfiguration config) throws Exception {
AnalysisLogger.setLogger(config.getConfigPath() + LogFile);
referencedbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + GenericConfiguration.defaultConnectionFile,config);
AnalysisLogger.getLogger().debug("ReferenceDB initialized");
destinationdbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + GenericConfiguration.defaultConnectionFile,config);
AnalysisLogger.getLogger().debug("OriginalDB initialized");
}
//counts the elements in a table
public BigInteger countElements(String tablename, SessionFactory session)
{
BigInteger count = BigInteger.ZERO;
String countingQuery = "select count(*) from "+tablename;
AnalysisLogger.getLogger().debug("Getting DB elements by this query: "+countingQuery);
List<Object> result = DatabaseFactory.executeSQLQuery(countingQuery, session);
count = (BigInteger) result.get(0);
return count;
}
//takes the destination-table rows matching the given where clause
public List<Object> takeChunkOfDestElements(String tablename,String selectedColumns,String criteria, SessionFactory session,String whereclause) {
String query = String.format(selectDestElementsQuery,selectedColumns,tablename,whereclause);
// AnalysisLogger.getLogger().debug("takeChunkOfDestinationElements-> executing query on DB: " + query);
List<Object> results = DatabaseFactory.executeSQLQuery(query, session);
return results;
}
//takes a chunk of elements from the given table, ordered by the given criteria
public List<Object> takeChunkOfElements(String tablename,String selectedColumns,String criteria, int limit, int offset, SessionFactory session) {
String query = String.format(selectElementsQuery,selectedColumns,tablename,criteria)+ " limit " + limit + " offset " + offset;
AnalysisLogger.getLogger().debug("takeChunkOfElements-> executing query on DB: " + query);
List<Object> results = DatabaseFactory.executeSQLQuery(query, session);
return results;
}
//parses a string as a double; returns -Double.MAX_VALUE when the string is not numeric
public double isNumber(String element){
try{
double d = Double.parseDouble(element);
return d;
}catch(Exception e){
return -Double.MAX_VALUE;
}
}
public static void main(String[] args) throws Exception {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_micro");
// config.setEnvelopeTable("hspen");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
HpecDiscrepanciesCalculator ec = new HpecDiscrepanciesCalculator(config);
long t0 = System.currentTimeMillis();
ec.runTest();
long t1 = System.currentTimeMillis();
float difference = (t1-t0);
difference = difference /(float)(1000*60);
System.out.println("Elapsed time : "+difference+" min");
}
//runs the test between the tables
public boolean runTest() {
long t0 = System.currentTimeMillis();
// take the number of elements
numOfElements = countElements(analyzedTable, destinationdbConnection);
AnalysisLogger.getLogger().debug("Remote DB contains " + numOfElements + " elements.");
int maxNumber = numOfElements.intValue();
int numOfChunks = maxNumber / chunkSize;
if ((maxNumber % chunkSize) > 0) {
numOfChunks++;
}
int startIndex = 0;
// reset error counter
errorCounter = 0;
boolean equal = true;
boolean error = false;
for (int i = startIndex; i < numOfChunks; i++) {
int offset = i * chunkSize;
List<Object> referencechunk = takeChunkOfElements(referenceTable,referenceSelectedColumns,referenceCriteria, chunkSize, offset, referencedbConnection);
// List<Object> destinationchunk = takeChunkOfElements(analyzedTable,destinationSelectedColumns,destinationCriteria, chunkSize, offset, destinationdbConnection);
int m = referencechunk.size();
for (int j=0;j<m;j++){
Object[] refrow = (Object[]) referencechunk.get(j);
String whereclause = "";
String[] refcolumns = referenceCriteria.split(",");
for (int k=0;k<refcolumns.length;k++){
whereclause+=refcolumns[k].trim()+"='"+refrow[k]+"'";
if (k<refcolumns.length-1)
whereclause+=" and ";
}
List<Object> destinationchunk = takeChunkOfDestElements(analyzedTable,destinationSelectedColumns,destinationCriteria, destinationdbConnection,whereclause);
Object[] destrow = null;
int columns = 0;
if (destinationchunk!=null && destinationchunk.size()>0){
destrow = (Object[]) destinationchunk.get(0);
columns = destrow.length;
}
else{
AnalysisLogger.getLogger().debug("ERROR - COULD NOT FIND "+refrow+" ON DESTINATION TABLE");
for (int k=0;k<refcolumns.length;k++){
AnalysisLogger.getLogger().debug("-"+refrow[k]);
}
error = true;
break;
}
for (int k=0;k<columns;k++){
String refelem = ""+refrow[k];
String destelem = ""+destrow[k];
double d = isNumber(refelem);
// System.out.println(refelem+" vs "+destelem+ " ");
if (d!=-Double.MAX_VALUE){
if (Math.abs(d-isNumber(destelem))>Threshold){
errorCounter++;
equal = false;
AnalysisLogger.getLogger().debug("ERROR - DISCREPANCY AT NUMBERS COMPARISON: "+refelem+" vs "+destelem);
}
}
else if (!refelem.equals(destelem)){
errorCounter++;
equal = false;
AnalysisLogger.getLogger().debug("ERROR - DISCREPANCY AT STRING COMPARISON: "+refelem+" vs "+destelem);
}
if (!equal)
break;
}
// System.out.println();
if (!equal)
break;
}
if (error)
break;
if (!equal)
break;
else
AnalysisLogger.getLogger().debug("CHUNK NUMBER "+i+" of "+numOfChunks+" OK!");
}
long t1 = System.currentTimeMillis();
AnalysisLogger.getLogger().debug("ELAPSED TIME: " + (t1-t0) + " ms");
//close connections
referencedbConnection.close();
destinationdbConnection.close();
return equal;
}
}
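For reference, the per-row lookup above builds its where clause from the reference criteria; a standalone sketch of that construction (WhereClauseExample is hypothetical and the sample values are illustrative):

public class WhereClauseExample {
// mirrors the where-clause construction inside HpecDiscrepanciesCalculator.runTest
static String buildWhereClause(String referenceCriteria, Object[] refrow) {
String whereclause = "";
String[] refcolumns = referenceCriteria.split(",");
for (int k = 0; k < refcolumns.length; k++) {
whereclause += refcolumns[k].trim() + "='" + refrow[k] + "'";
if (k < refcolumns.length - 1)
whereclause += " and ";
}
return whereclause;
}
public static void main(String[] args) {
// prints: speciesid='Fis-22747' and csquarecode='1000:102:2'
System.out.println(buildWhereClause("speciesid,csquarecode", new Object[] { "Fis-22747", "1000:102:2" }));
}
}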

View File

@ -0,0 +1,179 @@
package org.gcube.dataanalysis.ecoengine.utils;
import java.math.BigInteger;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.GenericConfiguration;
import org.hibernate.SessionFactory;
/**
* Checks whether two tables are equal.
* Numbers are compared with a tolerance of 0.01 (second decimal position).
*/
public class TablesDiscrepanciesCalculator {
private BigInteger numOfElements;
private int errorCounter;
//connection setup
protected String LogFile = "ALog.properties";
//fundamental: the size of each chunk of rows compared at a time
int chunkSize = 7000;
static double Threshold = 0.01;
//change this defaults to change comparison
public String referenceTable = "speciesrichness";
public String analyzedTable = "crossspecies_nonreviewed";
public String referenceCriteria = "csquarecode,speccount";
public String destinationCriteria = "csquarecode,maxspeciescountinacell";
public String referenceSelectedColumns = "csquarecode,speccount";
public String destinationSelectedColumns = "csquarecode,maxspeciescountinacell";
//selection query
public static String selectElementsQuery = "select %1$s from %2$s order by %3$s";
//database connections
protected SessionFactory referencedbConnection;
protected SessionFactory destinationdbConnection;
//init connections
public TablesDiscrepanciesCalculator(GenericConfiguration config) throws Exception {
AnalysisLogger.setLogger(config.getConfigPath() + LogFile);
referencedbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + GenericConfiguration.defaultConnectionFile,config);
AnalysisLogger.getLogger().debug("ReferenceDB initialized");
destinationdbConnection = DatabaseFactory.initDBConnection(config.getConfigPath() + GenericConfiguration.defaultConnectionFile,config);
AnalysisLogger.getLogger().debug("OriginalDB initialized");
}
//counts the elements in a table
public BigInteger countElements(String tablename, SessionFactory session)
{
BigInteger count = BigInteger.ZERO;
String countingQuery = "select count(*) from "+tablename;
AnalysisLogger.getLogger().debug("Getting DB elements by this query: "+countingQuery);
List<Object> result = DatabaseFactory.executeSQLQuery(countingQuery, session);
count = (BigInteger) result.get(0);
return count;
}
//takes a chunk of elements from the given table, ordered by the given criteria
public List<Object> takeChunkOfElements(String tablename,String selectedColumns,String criteria, int limit, int offset, SessionFactory session) {
String query = String.format(selectElementsQuery,selectedColumns,tablename,criteria)+ " limit " + limit + " offset " + offset;
AnalysisLogger.getLogger().debug("takeChunkOfElements-> executing query on DB: " + query);
List<Object> results = DatabaseFactory.executeSQLQuery(query, session);
return results;
}
//parses a string as a double; returns -Double.MAX_VALUE when the string is not numeric
public double isNumber(String element){
try{
double d = Double.parseDouble(element);
return d;
}catch(Exception e){
return -Double.MAX_VALUE;
}
}
public static void main(String[] args) throws Exception {
GenericConfiguration config = new GenericConfiguration();
config.setDistributionTable("hspec_suitable_automatic_local");
config.setConfigPath("./cfg/");
config.setCsquarecodesTable("hcaf_d");
config.setEnvelopeTable("hspen_micro");
// config.setEnvelopeTable("hspen");
config.setCreateTable(true);
config.setNumberOfResources(2);
config.setDatabaseUserName("gcube");
config.setDatabasePassword("d4science2");
config.setDatabaseURL("jdbc:postgresql://localhost/testdb");
TablesDiscrepanciesCalculator ec = new TablesDiscrepanciesCalculator(config);
long t0 = System.currentTimeMillis();
ec.runTest();
long t1 = System.currentTimeMillis();
float difference = (t1-t0);
difference = difference /(float)(1000*60);
System.out.println("Elapsed time : "+difference+" min");
}
//runs the test between the tables
public boolean runTest() {
long t0 = System.currentTimeMillis();
// take the number of elements
numOfElements = countElements(analyzedTable, destinationdbConnection);
AnalysisLogger.getLogger().debug("Remote DB contains " + numOfElements + " elements.");
int maxNumber = numOfElements.intValue();
int numOfChunks = maxNumber / chunkSize;
if ((maxNumber % chunkSize) > 0) {
numOfChunks++;
}
int startIndex = 0;
// reset error counter
errorCounter = 0;
boolean equal = true;
for (int i = startIndex; i < numOfChunks; i++) {
int offset = i * chunkSize;
List<Object> referencechunk = takeChunkOfElements(referenceTable,referenceSelectedColumns,referenceCriteria, chunkSize, offset, referencedbConnection);
List<Object> destinationchunk = takeChunkOfElements(analyzedTable,destinationSelectedColumns,destinationCriteria, chunkSize, offset, destinationdbConnection);
int m = referencechunk.size();
for (int j=0;j<m;j++){
Object[] refrow = (Object[]) referencechunk.get(j);
Object[] destrow = (Object[]) destinationchunk.get(j);
int columns = destrow.length;
for (int k=0;k<columns;k++){
String refelem = ""+refrow[k];
String destelem = ""+destrow[k];
double d = isNumber(refelem);
// System.out.print(refelem+" vs "+destelem+ " ");
if (d!=-Double.MAX_VALUE){
if (Math.abs(d-isNumber(destelem))>Threshold){
errorCounter++;
equal = false;
AnalysisLogger.getLogger().debug("ERROR - DISCREPANCY AT NUMBERS COMPARISON: "+refelem+" vs "+destelem);
}
}
else if (!refelem.equals(destelem)){
errorCounter++;
equal = false;
AnalysisLogger.getLogger().debug("ERROR - DISCREPANCY AT STRING COMPARISON: "+refelem+" vs "+destelem);
}
if (!equal)
break;
}
// System.out.println();
if (!equal)
break;
}
if (!equal)
break;
else
AnalysisLogger.getLogger().debug("CHUNK NUMBER "+i+" OK!");
}
long t1 = System.currentTimeMillis();
AnalysisLogger.getLogger().debug("ELAPSED TIME: " + (t1-t0) + " ms");
//close connections
referencedbConnection.close();
destinationdbConnection.close();
return equal;
}
}