diff --git a/.classpath b/.classpath deleted file mode 100644 index 756245a..0000000 --- a/.classpath +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/.project b/.project deleted file mode 100644 index b39e3cd..0000000 --- a/.project +++ /dev/null @@ -1,23 +0,0 @@ - - - EcologicalEngine - - - - - - org.eclipse.jdt.core.javabuilder - - - - - org.eclipse.m2e.core.maven2Builder - - - - - - org.eclipse.m2e.core.maven2Nature - org.eclipse.jdt.core.javanature - - diff --git a/.settings/org.eclipse.core.resources.prefs b/.settings/org.eclipse.core.resources.prefs deleted file mode 100644 index 9b7fa93..0000000 --- a/.settings/org.eclipse.core.resources.prefs +++ /dev/null @@ -1,4 +0,0 @@ -#Thu Jun 21 16:51:19 CEST 2012 -eclipse.preferences.version=1 -encoding//src/main/java=UTF-8 -encoding/=UTF-8 diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs deleted file mode 100644 index 8bfda26..0000000 --- a/.settings/org.eclipse.jdt.core.prefs +++ /dev/null @@ -1,13 +0,0 @@ -#Thu Jun 21 16:51:19 CEST 2012 -eclipse.preferences.version=1 -org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled -org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6 -org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve -org.eclipse.jdt.core.compiler.compliance=1.6 -org.eclipse.jdt.core.compiler.debug.lineNumber=generate -org.eclipse.jdt.core.compiler.debug.localVariable=generate -org.eclipse.jdt.core.compiler.debug.sourceFile=generate -org.eclipse.jdt.core.compiler.problem.assertIdentifier=error -org.eclipse.jdt.core.compiler.problem.enumIdentifier=error -org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning -org.eclipse.jdt.core.compiler.source=1.6 diff --git a/.settings/org.eclipse.m2e.core.prefs b/.settings/org.eclipse.m2e.core.prefs deleted file mode 100644 index 3c9b7c9..0000000 --- a/.settings/org.eclipse.m2e.core.prefs +++ /dev/null @@ -1,5 +0,0 @@ -#Thu Jun 21 16:32:52 CEST 2012 -activeProfiles= 
-eclipse.preferences.version=1 -resolveWorkspaceProjects=true -version=1 diff --git a/CHANGELOG b/CHANGELOG deleted file mode 100644 index 5ff0fb3..0000000 --- a/CHANGELOG +++ /dev/null @@ -1,2 +0,0 @@ -v. 1.0.0 (20-04-2011) - * First release diff --git a/INSTALL b/INSTALL deleted file mode 100644 index 9bcbb15..0000000 --- a/INSTALL +++ /dev/null @@ -1 +0,0 @@ -Used as a library in the gCube Framework \ No newline at end of file diff --git a/LICENSE b/LICENSE deleted file mode 100644 index bd0c2b4..0000000 --- a/LICENSE +++ /dev/null @@ -1,8 +0,0 @@ -gCube System - License ------------------------------------------------------------- - -The gCube/gCore software is licensed as Free Open Source software conveying to -the EUPL (http://ec.europa.eu/idabc/eupl). -The software and documentation is provided by its authors/distributors "as is" -and no expressed or implied warranty is given for its use, quality or fitness -for a particular case. diff --git a/MAINTAINERS b/MAINTAINERS deleted file mode 100644 index 1f1e62d..0000000 --- a/MAINTAINERS +++ /dev/null @@ -1,2 +0,0 @@ -Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa, -Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" \ No newline at end of file diff --git a/README b/README deleted file mode 100644 index b13566d..0000000 --- a/README +++ /dev/null @@ -1,42 +0,0 @@ -The gCube System - Ecological Engine Library ------------------------------------------------------------- - -This work is partially funded by the European Commission in the -context of the D4Science project (www.d4science.eu), under the -1st call of FP7 IST priority. - -Authors -------- - -* Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa, - Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - -Version and Release Date ------------------------- - -version 1.2.0 (03-05-2012) - -Description --------------------- - -Support library for statistics analysis on Time Series data. 
- - -Download information --------------------- - -Source code is available from SVN: -http://svn.research-infrastructures.eu/d4science/gcube/trunk/content-management/EcologicalModelling - -Binaries can be downloaded from: -http://software.d4science.research-infrastructures.eu/ - -Documentation -------------- -VREManager documentation is available on-line from the Projects Documentation Wiki: -https://gcube.wiki.gcube-system.org/gcube/index.php/Ecological_Modeling - -Licensing ---------- - -This software is licensed under the terms you may find in the file named "LICENSE" in this directory. diff --git a/build.xml b/build.xml deleted file mode 100644 index 70f1441..0000000 --- a/build.xml +++ /dev/null @@ -1,173 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/cfg/ALog.properties b/cfg/ALog.properties deleted file mode 100644 index cd47834..0000000 --- a/cfg/ALog.properties +++ /dev/null @@ -1,32 +0,0 @@ -#### Use two appenders, one to log to console, another to log to a file -log4j.rootCategory= R - -#### First appender writes to console -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n -#log4j.appender.stdout.layout.ConversionPattern=%m%n -#log4j.appender.stdout.File=Analysis.log - -#### Second appender writes to a file -log4j.logger.AnalysisLogger=trace,stdout, R -log4j.appender.R=org.apache.log4j.RollingFileAppender -#log4j.appender.R=org.apache.log4j.AsyncAppender -#log4j.appender.R.Threshold=INFO -log4j.appender.R.File=Analysis.log -log4j.appender.R.MaxFileSize=50000KB -log4j.appender.R.MaxBackupIndex=2 
-log4j.appender.R.layout=org.apache.log4j.PatternLayout -log4j.appender.R.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n -#log4j.appender.R.layout.ConversionPattern=%m%n - -#### Third appender writes to a file -log4j.logger.org.hibernate=H -#log4j.appender.H=org.apache.log4j.RollingFileAppender -log4j.appender.H=org.apache.log4j.AsyncAppender -#log4j.appender.H.File=HibernateLog.log -#log4j.appender.H.MaxFileSize=1024KB -#log4j.appender.H.MaxBackupIndex=2 -log4j.appender.H.layout=org.apache.log4j.PatternLayout -log4j.appender.H.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n - diff --git a/cfg/DestinationDBHibernate.cfg.out.xml b/cfg/DestinationDBHibernate.cfg.out.xml deleted file mode 100644 index e69de29..0000000 diff --git a/cfg/DestinationDBHibernate.cfg.xml b/cfg/DestinationDBHibernate.cfg.xml deleted file mode 100644 index 9418666..0000000 --- a/cfg/DestinationDBHibernate.cfg.xml +++ /dev/null @@ -1,17 +0,0 @@ - - - - org.postgresql.Driver - org.hibernate.connection.C3P0ConnectionProvider - jdbc:postgresql://localhost/testdb - gcube - d4science2 - org.hibernatespatial.postgis.PostgisDialect - org.hibernate.transaction.JDBCTransactionFactory - 0 - 10 - 0 - 1 - thread - - \ No newline at end of file diff --git a/cfg/algorithms.properties b/cfg/algorithms.properties deleted file mode 100644 index bc0a857..0000000 --- a/cfg/algorithms.properties +++ /dev/null @@ -1,13 +0,0 @@ -AQUAMAPS_SUITABLE=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable -AQUAMAPS_NATIVE=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNative -AQUAMAPS_NATIVE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNative2050 -AQUAMAPS_SUITABLE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable2050 -REMOTE_AQUAMAPS_SUITABLE=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator -REMOTE_AQUAMAPS_NATIVE=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator 
-REMOTE_AQUAMAPS_NATIVE_2050=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator -REMOTE_AQUAMAPS_SUITABLE_2050=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator -DUMMY=org.gcube.dataanalysis.ecoengine.spatialdistributions.DummyAlgorithm -TEST=org.gcube.dataanalysis.ecoengine.spatialdistributions.TestAlgorithm -AQUAMAPS_NATIVE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNN -AQUAMAPS_SUITABLE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNNSuitable -AQUAMAPS_NEURAL_NETWORK_NS=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNNNS \ No newline at end of file diff --git a/cfg/evaluators.properties b/cfg/evaluators.properties deleted file mode 100644 index 3269cae..0000000 --- a/cfg/evaluators.properties +++ /dev/null @@ -1,2 +0,0 @@ -DISCREPANCY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis -QUALITY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DistributionQualityAnalysis \ No newline at end of file diff --git a/cfg/generators.properties b/cfg/generators.properties deleted file mode 100644 index 0752409..0000000 --- a/cfg/generators.properties +++ /dev/null @@ -1,3 +0,0 @@ -LOCAL_WITH_DATABASE=org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator -SIMPLE_LOCAL=org.gcube.dataanalysis.ecoengine.processing.LocalSimpleSplitGenerator -REMOTE_RAINYCLOUD=org.gcube.dataanalysis.ecoengine.processing.RainyCloudGenerator \ No newline at end of file diff --git a/cfg/modelers.properties b/cfg/modelers.properties deleted file mode 100644 index d5c652e..0000000 --- a/cfg/modelers.properties +++ /dev/null @@ -1 +0,0 @@ -HSPEN_MODELER=org.gcube.dataanalysis.ecoengine.modeling.SimpleModeler \ No newline at end of file diff --git a/cfg/models.properties b/cfg/models.properties deleted file mode 100644 index 198f710..0000000 --- a/cfg/models.properties +++ /dev/null @@ -1,3 +0,0 @@ -HSPEN=org.gcube.dataanalysis.ecoengine.models.ModelHSPEN 
-AQUAMAPSNN=org.gcube.dataanalysis.ecoengine.models.ModelAquamapsNN -AQUAMAPSNNNS=org.gcube.dataanalysis.ecoengine.models.ModelAquamapsNNNS \ No newline at end of file diff --git a/cfg/neuralnetwork_Fis-22747_gianpaolo.coro b/cfg/neuralnetwork_Fis-22747_gianpaolo.coro deleted file mode 100644 index 28e6677..0000000 Binary files a/cfg/neuralnetwork_Fis-22747_gianpaolo.coro and /dev/null differ diff --git a/cfg/neuralnetwork_Fis-22747_gianpaolo.coro_best_449_100 b/cfg/neuralnetwork_Fis-22747_gianpaolo.coro_best_449_100 deleted file mode 100644 index 2b83d91..0000000 Binary files a/cfg/neuralnetwork_Fis-22747_gianpaolo.coro_best_449_100 and /dev/null differ diff --git a/cfg/nodealgorithms.properties b/cfg/nodealgorithms.properties deleted file mode 100644 index 4493d3b..0000000 --- a/cfg/nodealgorithms.properties +++ /dev/null @@ -1 +0,0 @@ -AQUAMAPS_SUITABLE=org.gcube.dataanalysis.peeng.models.AquamapsSuitableNode diff --git a/cfg/operators.xml b/cfg/operators.xml deleted file mode 100644 index 93b465c..0000000 --- a/cfg/operators.xml +++ /dev/null @@ -1,2620 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/changelog.xml b/changelog.xml deleted file mode 100644 index e72b78e..0000000 --- a/changelog.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - First Release - - \ No newline at end of file diff --git a/distro/CHANGELOG b/distro/CHANGELOG deleted file mode 100644 index 5ff0fb3..0000000 --- a/distro/CHANGELOG +++ /dev/null @@ -1,2 +0,0 @@ -v. 1.0.0 (20-04-2011) - * First release diff --git a/distro/INSTALL b/distro/INSTALL deleted file mode 100644 index 9bcbb15..0000000 --- a/distro/INSTALL +++ /dev/null @@ -1 +0,0 @@ -Used as a library in the gCube Framework \ No newline at end of file diff --git a/distro/LICENSE b/distro/LICENSE deleted file mode 100644 index bd0c2b4..0000000 --- a/distro/LICENSE +++ /dev/null @@ -1,8 +0,0 @@ -gCube System - License ------------------------------------------------------------- - -The gCube/gCore software is licensed as Free Open Source software conveying to -the EUPL (http://ec.europa.eu/idabc/eupl). 
-The software and documentation is provided by its authors/distributors "as is" -and no expressed or implied warranty is given for its use, quality or fitness -for a particular case. diff --git a/distro/MAINTAINERS b/distro/MAINTAINERS deleted file mode 100644 index 1f1e62d..0000000 --- a/distro/MAINTAINERS +++ /dev/null @@ -1,2 +0,0 @@ -Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa, -Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" \ No newline at end of file diff --git a/distro/README b/distro/README deleted file mode 100644 index b13566d..0000000 --- a/distro/README +++ /dev/null @@ -1,42 +0,0 @@ -The gCube System - Ecological Engine Library ------------------------------------------------------------- - -This work is partially funded by the European Commission in the -context of the D4Science project (www.d4science.eu), under the -1st call of FP7 IST priority. - -Authors -------- - -* Gianpaolo Coro (gianpaolo.coro@isti.cnr.it), CNR Pisa, - Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" - -Version and Release Date ------------------------- - -version 1.2.0 (03-05-2012) - -Description --------------------- - -Support library for statistics analysis on Time Series data. - - -Download information --------------------- - -Source code is available from SVN: -http://svn.research-infrastructures.eu/d4science/gcube/trunk/content-management/EcologicalModelling - -Binaries can be downloaded from: -http://software.d4science.research-infrastructures.eu/ - -Documentation -------------- -VREManager documentation is available on-line from the Projects Documentation Wiki: -https://gcube.wiki.gcube-system.org/gcube/index.php/Ecological_Modeling - -Licensing ---------- - -This software is licensed under the terms you may find in the file named "LICENSE" in this directory. 
diff --git a/distro/changelog.xml b/distro/changelog.xml deleted file mode 100644 index e72b78e..0000000 --- a/distro/changelog.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - First Release - - \ No newline at end of file diff --git a/distro/profile.xml b/distro/profile.xml deleted file mode 100644 index 31555e8..0000000 --- a/distro/profile.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - Library - - Ecological Engine Library - EcologicalEngine - ${artifactId} - 1.2.0 - - - ${artifactId} - ${version} - - ${groupId} - ${artifactId} - ${version} - - - ${build.finalName}.jar - - - - - \ No newline at end of file diff --git a/distro/svnpath.txt b/distro/svnpath.txt deleted file mode 100644 index 3f85e8c..0000000 --- a/distro/svnpath.txt +++ /dev/null @@ -1 +0,0 @@ -https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine \ No newline at end of file diff --git a/ecologicalEngine.jardesc b/ecologicalEngine.jardesc deleted file mode 100644 index c8ea568..0000000 --- a/ecologicalEngine.jardesc +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - - - - - - - - - - diff --git a/etc/build.properties b/etc/build.properties deleted file mode 100644 index eaad457..0000000 --- a/etc/build.properties +++ /dev/null @@ -1,4 +0,0 @@ -name = EcologicalEngine -package = org.gcube.dataanalysis.ecoengine -# Where the jar will be stored -lib.dir = Dependencies/org.gcube.dataanalysis.ecoengine.jar diff --git a/etc/profile.xml b/etc/profile.xml deleted file mode 100644 index 7d1ce1d..0000000 --- a/etc/profile.xml +++ /dev/null @@ -1,156 +0,0 @@ - - - - Library - - Ecological Engine Library - DataAnalysis - EcologicalEngine - 1.0.0 - - - Ecological Engine Library - EcologicalEngine - 1.0.0 - - - - Common - hibernate-patched - 3.5.2 - - hibernate-patched - 3.5.2 - - false - - - - ExternalSoftware - postgresql-jdbc - 8.04.00 - - postgresql-jdbc - 8.04.00 - - false - - - - ExternalSoftware - jaxen - 1.1.0 - - jaxen - 1.1.0 - - false - - - - ExternalSoftware - jcommon - 1.0.16 - - 
jcommon - 1.0.16 - - false - - - - Common - rapidminer-custom - 1.0.0 - - rapidminer-custom - 1.0.0 - - false - - - - ExternalSoftware - hibernate-spatial-postgis - 1.0.0 - - hibernate-spatial-postgis - 1.0.0 - - false - - - - ExternalSoftware - hibernate-spatial - 1.0.0 - - hibernate-spatial - 1.0.0 - - false - - - - ExternalSoftware - postgis - 2.0.0 - - postgis - 2.0.0 - - false - - - - ExternalSoftware - jts - 1.10.0 - - jts - 1.10.0 - - false - - - - ExternalSoftware - slf4j - 1.05.00 - - slf4j - 1.05.00 - - false - - - - ExternalSoftware - google-gson - 1.7.1 - - google-gson - 1.7.1 - - false - - - - ExternalSoftware - jfreechart - 1.0.13 - - jfreechart - 1.0.13 - - false - - - - lib/org.gcube.dataanalysis.ecoengine.jar - - - - - diff --git a/pom.xml b/pom.xml deleted file mode 100644 index 87ede83..0000000 --- a/pom.xml +++ /dev/null @@ -1,227 +0,0 @@ - - 4.0.0 - - maven-parent - org.gcube.tools - 1.0.0 - - - org.gcube.dataanalysis - ecological-engine - 1.3.0-SNAPSHOT - ecological-engine - ecological-engine library - - ${project.basedir}/distro - - - - c3p0 - c3p0 - 0.9.1.2 - - - commons-collections - commons-collections - 3.1 - - - commons-httpclient - commons-httpclient - 3.1 - - - commons-logging - commons-logging - 1.0.4 - - - dom4j - dom4j - 1.6 - - - graphlayout - graphlayout - 1.2.1 - - - com.google.code.gson - gson - 1.7.1 - - - org.hibernatespatial - hibernate-spatial-postgis - 1.0-M2 - - - org.hibernatespatial - hibernate-spatial - 1.0-M2 - - - org.gcube.common - hibernate-patched - 3.5.2 - - - jama - jama - 1.0.2 - - - jaxen - jaxen - 1.1 - - - jfree - jcommon - 1.0.16 - - - jfree - jfreechart - 1.0.13 - - - org.jgrapht - jgrapht - 0.8.2 - - - org.jgrapht - jgrapht-jdk1.6 - 0.8.2 - - - javax.transaction - jta - 1.1 - - - com.vividsolutions - jts - 1.10 - - - log4j - log4j - 1.2.16 - - - org.postgis - postgis - 2.0.0SVN - - - org.postgis - postgis-driver - PostGIS driver - 1.0 - - - postgresql - postgresql - 8.4-702.jdbc4 - - - org.gcube.common - 
rapidminer-custom - 1.0.0 - - - org.slf4j - slf4j-api - 1.6.4 - - - org.slf4j - slf4j-log4j12 - 1.6.4 - - - xpp3 - xpp3_min - 1.1.4c - - - com.thoughtworks.xstream - xstream - 1.3.1 - - - - - osgeo - Open Source Geospatial Foundation Repository Mirror - http://maven.research-infrastructures.eu/nexus/content/repositories/osgeo// - - - - - - maven-compiler-plugin - - 1.6 - 1.6 - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.12 - - true - - - - org.apache.maven.plugins - maven-resources-plugin - 2.5 - - - copy-profile - install - - copy-resources - - - target - - - ${distroDirectory} - true - - profile.xml - - - - - - - - - org.apache.maven.plugins - maven-assembly-plugin - 2.2 - - - ${distroDirectory}/descriptor.xml - - - - - servicearchive - install - - single - - - - - - - \ No newline at end of file diff --git a/src/org/gcube/contentmanagement/graphtools/abstracts/GenericDBExtractor.java b/src/org/gcube/contentmanagement/graphtools/abstracts/GenericDBExtractor.java deleted file mode 100644 index c4da73d..0000000 --- a/src/org/gcube/contentmanagement/graphtools/abstracts/GenericDBExtractor.java +++ /dev/null @@ -1,12 +0,0 @@ -package org.gcube.contentmanagement.graphtools.abstracts; - -import java.util.Map; - - -public interface GenericDBExtractor { - - public SamplesTable getMonoDimTable (String table, String column); - - public Map getMultiDimTemporalTables(String table, String xDimension, String groupDimension, String yValue, String speciesColumn, String... 
yFilters); - -} diff --git a/src/org/gcube/contentmanagement/graphtools/abstracts/GenericStandaloneGraph.java b/src/org/gcube/contentmanagement/graphtools/abstracts/GenericStandaloneGraph.java deleted file mode 100644 index cbc6d7a..0000000 --- a/src/org/gcube/contentmanagement/graphtools/abstracts/GenericStandaloneGraph.java +++ /dev/null @@ -1,187 +0,0 @@ -package org.gcube.contentmanagement.graphtools.abstracts; - -import java.awt.Image; -import java.awt.image.BufferedImage; -import java.io.File; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import javax.imageio.ImageIO; -import javax.swing.JPanel; - -import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphData; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; -import org.gcube.portlets.user.timeseries.charts.support.types.Point; -import org.jfree.chart.ChartPanel; -import org.jfree.chart.JFreeChart; -import org.jfree.data.category.DefaultCategoryDataset; -import org.jfree.data.general.Dataset; -import org.jfree.ui.ApplicationFrame; -import org.jfree.ui.RefineryUtilities; - -/* - * Converts a GraphData into a graphicable structure DataSet - * GenericStandaloneGraph: GraphData -> DataSet - */ -public abstract class GenericStandaloneGraph extends ApplicationFrame { - - /** - * - */ - private static final long serialVersionUID = 1L; - protected boolean big; - - public GenericStandaloneGraph(String title) { - super(title); - - big = false; - } - - - abstract protected Dataset generateDataset(); - - abstract protected JFreeChart createChart(Dataset dataset); - - abstract protected GenericStandaloneGraph getInstance(String title); - - public void render(Dataset set) { - - render(-1, -1, set); - } - - public void render(double x, double y, Dataset set) { - JFreeChart chart = createChart(set); - JPanel 
jp = new ChartPanel(chart); - - this.setContentPane(jp); - this.pack(); - if (big) - this.setBounds(0, 0, (int) this.getBounds().getWidth() * 2, (int) this.getBounds().getHeight() * 2); - - if ((x == -1) || (y == -1)) - RefineryUtilities.centerFrameOnScreen(this); - else - RefineryUtilities.positionFrameOnScreen(this, x, y); - - this.setVisible(true); - - } - - public List renderGraphGroupImage(int width, int height, GraphGroups graphgroups) { - - ArrayList images = new ArrayList(); - - Map graphmap = graphgroups.getGraphs(); - double x = 0; - double y = 0; - double max = 1; - // int numberOfGraphs = graphmap.size(); - - for (String key : graphmap.keySet()) { - - GenericStandaloneGraph graph = getInstance(key); - Dataset datas = graph.convert2Dataset(graphmap.get(key)); - images.add(graph.renderImgObject(width, height, datas)); - - x += 0.1; - y += 0.1; - if (x > max || y > max) { - x = 0; - y = 0; - } - } - - return images; - - } - - public void renderImages(String filePath, int width, int height, GraphGroups graphgroups) { - - List images = renderGraphGroupImage(width,height,graphgroups); - int i=0; - for (Image img:images){ - BufferedImage bimage = ImageTools.toBufferedImage(img); - File outputfile = new File(filePath+"_"+i+".png"); - try{ - ImageIO.write(bimage, "png", outputfile); - }catch(Exception e){ - AnalysisLogger.getLogger().error("renderImages->Error in writing files ",e); - } - i++; - } - - } - - public Image renderImgObject(int width, int height, Dataset set) { - JFreeChart chart = createChart(set); - - /* - JPanel jp = new ChartPanel(chart); - - this.setContentPane(jp); - this.pack(); - */ -// Image image = this.createImage(width, height); - - Image image = ImageTools.toImage(chart.createBufferedImage(width, height)); - - return image; - } - - - public void renderGraphGroup(GraphGroups graphgroups) { - - Map graphmap = graphgroups.getGraphs(); - double x = 0; - double y = 0; - double max = 1; - // int numberOfGraphs = graphmap.size(); - - for 
(String key : graphmap.keySet()) { - - GenericStandaloneGraph graph = getInstance(key); - Dataset datas = graph.convert2Dataset(graphmap.get(key)); - graph.render(x, y, datas); - - x += 0.1; - y += 0.1; - if (x > max || y > max) { - x = 0; - y = 0; - } - } - } - - protected Dataset convert2Dataset(GraphData st) { - - DefaultCategoryDataset dataset = new DefaultCategoryDataset(); - List> pointslist = st.getData(); - - // NOTE: after the graph generation graphs are inverted in x and y - int numbOfRows = pointslist.size(); - if (numbOfRows > 0) { - int numbOfCols = pointslist.get(0).getEntries().size(); - - for (int x = 0; x < numbOfRows; x++) { - - String xlabel = pointslist.get(x).getLabel(); - - for (int y = 0; y < numbOfCols; y++) { - - double value = pointslist.get(x).getEntries().get(y).getValue().doubleValue(); - String ylabel = pointslist.get(x).getEntries().get(y).getLabel(); - - // System.out.println("ADDING : "+value+" , "+ylabel+" , "+xlabel); - - dataset.addValue(value, xlabel, ylabel); - } - } - - } - return dataset; - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/abstracts/SamplesTable.java b/src/org/gcube/contentmanagement/graphtools/abstracts/SamplesTable.java deleted file mode 100644 index eb37f5d..0000000 --- a/src/org/gcube/contentmanagement/graphtools/abstracts/SamplesTable.java +++ /dev/null @@ -1,156 +0,0 @@ -package org.gcube.contentmanagement.graphtools.abstracts; - -import java.util.LinkedList; -import java.util.List; - -import com.rapidminer.example.Attribute; -import com.rapidminer.example.ExampleSet; -import com.rapidminer.example.table.AttributeFactory; -import com.rapidminer.example.table.DoubleArrayDataRow; -import com.rapidminer.example.table.MemoryExampleTable; -import com.rapidminer.tools.Ontology; - -public abstract class SamplesTable { - - // attributes = columns of numbers - public double minY = 0; - public double maxY = 0; - public double minX = 0; - public double maxX = 0; - - public ExampleSet 
generateExampleSet() { - // create attribute list - List attributes = new LinkedList(); - // generate columns for attributes - for (int a = 0; a < getNumOfAttributes(); a++) { - attributes.add(AttributeFactory.createAttribute("att" + a, Ontology.REAL)); - } - - // add a label column - Attribute label = AttributeFactory.createAttribute("label", Ontology.NOMINAL); - attributes.add(label); - - // create table - MemoryExampleTable table = new MemoryExampleTable(attributes); - - // fill table (here : only real values ) - for (int d = 0; d < getNumOfDataRows(); d++) { - // generate a row of double values - double[] data = new double[attributes.size()]; - // fill rows data - for (int a = 0; a < getNumOfAttributes(); a++) { - // all with proper data here - data[a] = getValue(d, a); - } - // maps the nominal classifcation to a double value - data[data.length - 1] = label.getMapping().mapString(getClassification(d)); - - // add data row - table.addDataRow(new DoubleArrayDataRow(data)); - } - - // create example set - ExampleSet exampleSet = table.createExampleSet(label); - - return exampleSet; - - } - - public void generateSampleTable(ExampleSet es) { - - MemoryExampleTable met = (MemoryExampleTable) es.getExampleTable(); - int numofcolumns = met.getAttributeCount(); - int numofrows = met.size(); - // System.out.println("COL "+numofcolumns+" ROWS "+numofrows); - - for (int i = 0; i < numofrows; i++) { - - Attribute labelAtt = met.getAttribute(numofcolumns - 1); - int index = (int) met.getDataRow(i).get(labelAtt); - String label = labelAtt.getMapping().mapIndex(index); - addSampleRow(label, 0); - - // addLabel(i,label); - - for (int j = 0; j < numofcolumns - 1; j++) { - Attribute att = AttributeFactory.createAttribute("att" + j, Ontology.REAL); - att.setTableIndex(j); - // System.out.println("ADDING TO " + i+","+j); - DoubleArrayDataRow dadr = (DoubleArrayDataRow) met.getDataRow(i); - double element = dadr.get(att); - - addSample(i, j, element); - } - - } - - } - - - abstract 
public int getNumOfAttributes(); - - abstract public int getNumOfDataRows(); - - abstract public double getValue(int d, int a); - - abstract public String getClassification(int d); - - public String toString() { - - StringBuffer bs = new StringBuffer(); - - bs.append("NUMBER OF ROWS: " + getNumOfDataRows() + "\n"); - bs.append("NUMBER OF COLUMNS: " + getNumOfAttributes() + "\n"); - - for (int i = 0; i < getNumOfDataRows(); i++) { - bs.append("ROW " + i + " : "); - bs.append("LABEL " + getClassification(i) + " : "); - - for (int j = 0; j < getNumOfAttributes(); j++) { - bs.append(getValue(i, j) + "\t"); - } - - bs.append("\n"); - } - - return bs.toString(); - } - - public void calculateBounds() { - - int Ylen = getNumOfAttributes(); - int Xlen = getNumOfDataRows(); - - for (int i = 0; i < Xlen; i++) { - for (int j = 0; j < Ylen; j++) { - double localmin = minY; - double localmax = maxY; - if (j == 0) { - localmin = minX; - localmax = maxX; - } - double point = getValue(i, j); - if (point < localmin) { - localmin = point; - } else if (point > localmax) { - localmax = point; - } - if (j == 0) { - minX = localmin; - maxX = localmax; - } else { - minY = localmin; - maxY = localmax; - } - } - } - - } - - abstract public void addLabel(int i, String label); - - abstract public void addSample(int i, int j, double value); - - abstract public void addSampleRow(String label, double... 
values); - -} diff --git a/src/org/gcube/contentmanagement/graphtools/core/StatisticsGenerator.java b/src/org/gcube/contentmanagement/graphtools/core/StatisticsGenerator.java deleted file mode 100644 index c8ce539..0000000 --- a/src/org/gcube/contentmanagement/graphtools/core/StatisticsGenerator.java +++ /dev/null @@ -1,192 +0,0 @@ -package org.gcube.contentmanagement.graphtools.core; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import org.gcube.contentmanagement.graphtools.abstracts.SamplesTable; -import org.gcube.contentmanagement.graphtools.core.filters.Filter; -import org.gcube.contentmanagement.graphtools.data.GraphSamplesTable; -import org.gcube.contentmanagement.graphtools.data.conversions.GraphConverter2D; -import org.gcube.contentmanagement.graphtools.data.databases.CommonDBExtractor; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphData; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; -import org.gcube.portlets.user.timeseries.charts.support.types.Point; -import org.hibernate.SessionFactory; - -import com.rapidminer.RapidMiner; -import com.rapidminer.example.ExampleSet; -import com.rapidminer.operator.preprocessing.sampling.AbsoluteSampling; -import com.rapidminer.tools.OperatorService; -import com.thoughtworks.xstream.XStream; -import com.thoughtworks.xstream.io.xml.DomDriver; - -public class StatisticsGenerator { - - public static void main(String[] args) throws Exception { - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; 
- StatisticsGenerator stg = new StatisticsGenerator(); - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - // database Parameters - conf.setDatabaseUserName("root"); - // conf.setDatabasePassword("password"); - conf.setDatabaseDriver("com.mysql.jdbc.Driver"); - conf.setDatabaseURL("jdbc:mysql://localhost/timeseries"); - conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect"); - conf.setDatabaseAutomaticTestTable("connectiontesttable"); - conf.setDatabaseIdleConnectionTestPeriod("3600"); - - // stg.init("./cfg/"); - stg.init("./cfg/", conf); - - stg.generateGraphs(3, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2); - } - - SessionFactory referenceDBSession; - CommonDBExtractor extractor; - private static final String LogFile = "ALog.properties"; - private static final String HibFile = "hibernate.cfg.xml"; - private static final String OperatorsFile = "operators.xml"; - private List ColumnFilters; - private Filter XRangeFilter; - private Filter YRangeFilter; - private XStream xStream; - - public void init(String cfgPath) throws Exception { - init(cfgPath, null); - } - - public SessionFactory getDBSession() { - return this.referenceDBSession; - } - - public void init(String cfgPath, LexicalEngineConfiguration config) throws Exception { - AnalysisLogger.setLogger(cfgPath + "/" + LogFile); - if (config == null) - referenceDBSession = DatabaseFactory.initDBConnection(cfgPath + HibFile); - else - referenceDBSession = DatabaseFactory.initDBConnection(cfgPath + HibFile, config); - - ColumnFilters = new ArrayList(); - - extractor = new CommonDBExtractor(referenceDBSession); - - AnalysisLogger.getLogger().info("StatisticsGenerator->initialization complete"); - System.setProperty("rapidminer.init.operators", cfgPath + OperatorsFile); - xStream = new XStream(new DomDriver()); - RapidMiner.init(); - } - - public void resetFilters(){ - ColumnFilters = new ArrayList(); - } - - public void addColumnFilter(String column, 
String element, String operator) { - ColumnFilters.add(new Filter(column, element, operator)); - } - - public void addColumnFilter(String column, String element) { - ColumnFilters.add(new Filter(column, element)); - } - - public void addXRangeFilter(String xmin, String xmax) { - XRangeFilter = new Filter(xmin, xmax); - } - - public void addYRangeFilter(String ymin, String ymax) { - YRangeFilter = new Filter(ymin, ymax); - } - - - public GraphGroups generateGraphs(int maxElements, String timeSeriesTable, String xDimension, String yDimension, String groupDimension, String speciesColumn, String... filters) throws Exception { - - Map samplesMap = extractor.getMultiDimTemporalTables(ColumnFilters, YRangeFilter, timeSeriesTable, xDimension, groupDimension, yDimension, speciesColumn, filters); - - AnalysisLogger.getLogger().info("StatisticsGenerator-> samplesMap has been generated"); - AnalysisLogger.getLogger().trace(samplesMap.toString()); - // setup Absolute Sampling operator - AbsoluteSampling asop = (AbsoluteSampling) OperatorService.createOperator("AbsoluteSampling"); - asop.setParameter("sample_size", "" + maxElements); - asop.setParameter("local_random_seed", "-1"); - - // setup graphgroups - GraphGroups graphgroups = new GraphGroups(); - - int i = 1; - // for each samples table perform processing - for (String key : samplesMap.keySet()) { - // get samples table - SamplesTable stable = samplesMap.get(key); - // transform samples table into a list of points - List> singlegraph = GraphConverter2D.transformTable(stable); - - // filter XRange if necessary - if (XRangeFilter != null) { - singlegraph = GraphConverter2D.filterXRange(singlegraph, XRangeFilter.getFirstElement(), XRangeFilter.getSecondElement()); - } - - // setup the graph samples table to perform mining processing - GraphSamplesTable graphSamples = new GraphSamplesTable(singlegraph); - // if there are too many samples, perform downsampling - if (graphSamples.getNumOfDataRows() > maxElements) { - // 
generate an Example Set for Rapid Miner - ExampleSet es = graphSamples.generateExampleSet(); - // apply Sampling - es = asop.apply(es); - // generate a new graph samples table - graphSamples = new GraphSamplesTable(); - graphSamples.generateSampleTable(es); - - // get the points list from the graph samples table - singlegraph = graphSamples.getGraph(); - AnalysisLogger.getLogger().trace("Regeneration\n" + graphSamples.toString()); - } - - // reorder the elements of the points list - // this steps performs re-enumeration and reordering of the rows after the sampling operations - singlegraph = GraphConverter2D.reorder(singlegraph); - - AnalysisLogger.getLogger().trace("Reordering\n" + singlegraph.toString()); - if ((singlegraph != null)&&(singlegraph.size()>0)) { - // build up the GraphData for visualization - GraphData grd = new GraphData(singlegraph, true); - - // calculate the bounds of the graph - graphSamples.calculateBounds(); - - // set the bounds - grd.setMaxY(graphSamples.maxY); - grd.setMinY(graphSamples.minY); - - // add the points list - graphgroups.addGraph("Distribution for " + key, grd); - - AnalysisLogger.getLogger().trace("StatisticsGenerator-> graphgroup " + i + " generated with key: " + key); - i++; - } - } - - AnalysisLogger.getLogger().info("StatisticsGenerator-> graphgroups have been generated"); - - return graphgroups; - - } - - public String generateStringGraphs(int maxElements, String timeSeriesTable, String xDimension, String yDimension, String groupDimension, String speciesColumn, String... 
/**
 * A single column filter: a (element, value, comparison-operator) triple used
 * to render SQL-like "where" fragments such as {@code or field3='Brown seaweeds'}.
 */
public class Filter {

	String firstElement;
	String secondElement;
	String operator;

	/** Builds a filter with an explicit comparison operator. */
	public Filter(String first, String second, String operator) {
		setFirstElement(first);
		setSecondElement(second);
		setOperator(operator);
	}

	/** Builds an equality ("=") filter. */
	public Filter(String first, String second) {
		this(first, second, "=");
	}

	public void setFirstElement(String first) {
		firstElement = first;
	}

	public void setSecondElement(String second) {
		secondElement = second;
	}

	public void setOperator(String oper) {
		operator = oper;
	}

	/** @return the first element parsed as a double, or 0 when it is not numeric. */
	public double getFirstNumber() {
		return parseOrZero(firstElement);
	}

	/** @return the second element parsed as a double, or 0 when it is not numeric. */
	public double getSecondNumber() {
		return parseOrZero(secondElement);
	}

	// Lenient parse: any failure (null, non-numeric) silently maps to 0,
	// matching the original best-effort contract.
	private static double parseOrZero(String s) {
		try {
			return Double.parseDouble(s);
		} catch (Exception ignored) {
			return 0;
		}
	}

	public String getFirstElement() {
		return firstElement;
	}

	public String getSecondElement() {
		return secondElement;
	}

	/** Renders {@code "<logicoperator> first<op>'second' "} (note the trailing space). */
	public String toString(String logicoperator) {
		return logicoperator + " " + firstElement + operator + "'" + secondElement + "' ";
	}

	/** Same as {@link #toString(String)} with the default "or" connective. */
	public String toString() {
		return toString("or");
	}

}
7dbd93b..0000000 --- a/src/org/gcube/contentmanagement/graphtools/data/BigSamplesTable.java +++ /dev/null @@ -1,85 +0,0 @@ -package org.gcube.contentmanagement.graphtools.data; - -import java.util.HashMap; - -import org.gcube.contentmanagement.graphtools.abstracts.SamplesTable; - -public class BigSamplesTable extends SamplesTable { - - BigSparseTable table; - HashMap classifications; - Integer currentIndex; - - public BigSamplesTable() { - table = new BigSparseTable(); - classifications = new HashMap(); - currentIndex = 0; - } - - @Override - public int getNumOfAttributes() { - return table.width().intValue(); - } - - @Override - public int getNumOfDataRows() { - - return table.size().intValue(); - - } - - @Override - public double getValue(int d, int a) { - - return table.get(d, a); - - } - - @Override - public String getClassification(int d) { - - return classifications.get(d); - } - - public void addSampleRow(String label, double... values) { - - classifications.put(currentIndex, label); - int j = 0; - for (Double value : values) { - - table.add(currentIndex, j, value); - - j++; - } - - currentIndex = currentIndex + 1; - } - - public void addSample(int i, int j, double value) { - - if (i < currentIndex) - table.add(i, j, value); - } - - public void addLabel(int i, String label) { - - if (i < currentIndex) - classifications.put(i, label); - } - - - public static void main(String[] args){ - - BigSamplesTable bst = new BigSamplesTable(); - bst.addSampleRow("prova 1", 10, 12,13,14,15); - bst.addSampleRow("prova 2", 20, 15,14,15); - bst.addSampleRow("prova 3", 30, 11,110,150); - bst.addSample(0, -1,150); - System.out.println(bst.toString()); - - bst.generateExampleSet(); - - - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/data/BigSparseTable.java b/src/org/gcube/contentmanagement/graphtools/data/BigSparseTable.java deleted file mode 100644 index 4aa3be1..0000000 --- a/src/org/gcube/contentmanagement/graphtools/data/BigSparseTable.java +++ /dev/null 
/**
 * Sparse two-dimensional table of doubles backed by nested hash maps.
 * Cells that were never written read as 0. {@link #size()} and {@link #width()}
 * track the logical extent (max written index + 1 in each dimension).
 */
public class BigSparseTable {

	// row index -> (column index -> value); rows are created lazily on write
	HashMap<Integer, HashMap<Integer, Double>> table;
	Integer tableSize;  // logical number of rows
	Integer tableWidth; // logical number of columns

	public BigSparseTable() {
		table = new HashMap<Integer, HashMap<Integer, Double>>();
		tableSize = 0;
		tableWidth = 0;
	}

	/** @return the logical number of rows (highest written row index + 1). */
	public Integer size() {
		return tableSize;
	}

	/** @return the logical number of columns (highest written column index + 1). */
	public Integer width() {
		return tableWidth;
	}

	/**
	 * Writes a value at (i, j), growing the logical extent as needed.
	 * Negative coordinates are silently ignored.
	 */
	public void add(Integer i, Integer j, double value) {

		if ((i < 0) || (j < 0))
			return;

		// FIX: always materialize the row before writing. The original took a
		// shortcut (`table.get(i).put(...)`) whenever the cell already held a
		// non-zero value, relying on get() not to NPE — and get() itself
		// dereferenced a possibly-missing row (see below).
		HashMap<Integer, Double> row = table.get(i);
		if (row == null) {
			row = new HashMap<Integer, Double>();
			table.put(i, row);
		}
		row.put(j, value);

		if (tableSize <= i)
			tableSize = i + 1;
		if (tableWidth <= j)
			tableWidth = j + 1;
	}

	/**
	 * Reads the value at (i, j); defaults to 0 for any cell never written.
	 */
	public double get(Integer i, Integer j) {

		Double value = null;

		if (tableSize.compareTo(i) > 0) {
			// FIX: rows below tableSize may still be absent (rows are created
			// lazily), so guard against a null row instead of NPE-ing as the
			// original `table.get(i).get(j)` did.
			HashMap<Integer, Double> row = table.get(i);
			if (row != null)
				value = row.get(j);
		}

		if (value == null)
			value = Double.valueOf(0);

		return value;
	}

}
GraphData to SamplesTable - * GraphConverter2D : SamplesTable -> GraphData - * GraphSamplesTable: GraphData -> SamplesTable - */ - -/* - * - * Structure of a transposed graph coming from common structure library - * - * Point 1 (Series1, DefaultValue1) -> (y1,xLabel1) (y2,xLabel2) (y3,xLabel3) .. - * Point 2 (Series2, DefaultValue2) -> (y1,xLabel1) (y2,xLabel2) (y3,xLabel3) .. - * ... - * - * Structure of a not-transposed graph coming from database - * - * Point 1 (xLabel1,EnumeratedValue1) -> (y1,Series1) (y2,Series2) (y3,Series3) .. - * Point 2 (xLabel2,EnumeratedValue2) -> (y1,Series1) (y2,Series2) (y3,Series3) .. - * ... - */ -public class GraphSamplesTable extends SamplesTable { - - List> singlegraph; - - public GraphSamplesTable(List> graph) { - super(); - singlegraph = graph; - } - - public GraphSamplesTable() { - super(); - singlegraph = new ArrayList>(); - } - - // builds up a graph from a set of values and lables - // used for building up graphs from simple data - public GraphSamplesTable(String seriesName, List xLables, List yValues,boolean invert) { - super(); - singlegraph = new ArrayList>(); - int size = xLables.size(); - try { - - if (invert){ - - Point p = new Point(seriesName, Double.valueOf(0)); - for (int i = 0; i < size; i++) { - ValueEntry v = new ValueEntry(xLables.get(i), yValues.get(i)); - p.addEntry(v); - } - singlegraph.add(p); - - } - else{ - for (int i = 0; i < size; i++) { - Point p = new Point(xLables.get(i),new Double(i)); - ValueEntry v = new ValueEntry("series1", yValues.get(i)); - p.addEntry(v); - singlegraph.add(p); - } - } - - } catch (Exception e) { - - } - } - - @Override - public int getNumOfAttributes() { - if (singlegraph.size() > 0) - return singlegraph.get(0).getEntries().size() + 1; - else - return 0; - - } - - @Override - public int getNumOfDataRows() { - return singlegraph.size(); - } - - @Override - public double getValue(int d, int a) { - Double doub; - if (a == 0) - doub = (Double) singlegraph.get(d).getValue(); - 
else - doub = (Double) singlegraph.get(d).getEntries().get(a - 1).getValue(); - - return doub.doubleValue(); - } - - @Override - public String getClassification(int d) { - - StringBuffer sb = new StringBuffer(); - int numbOfColumns = getNumOfAttributes(); - sb.append(singlegraph.get(d).getLabel() + ";"); - for (int i = 0; i < numbOfColumns - 1; i++) { - sb.append(singlegraph.get(d).getEntries().get(i).getLabel()); - if (i < numbOfColumns - 2) - sb.append(";"); - } - return sb.toString(); - } - - @Override - public void addLabel(int i, String label) { - singlegraph.get(i).setLabel(label); - } - - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Override - public void addSample(int i, int j, double value) { - try { - - if (j == 0) { - Point p = singlegraph.get(i); - p.setValue(Double.valueOf(value)); - } else { - Point p = (Point) singlegraph.get(i); - p.getEntries().get(j - 1).setValue(new Double(value)); - } - - } catch (Exception e) { - e.printStackTrace(); - } - - } - - @SuppressWarnings("unchecked") - @Override - public void addSampleRow(String label, double... 
values) { - try { - - Double x = values[0]; - Double y1 = Double.valueOf(0); - if (values.length > 1) - y1 = values[1]; - - String[] lables = label.split(";"); - int labsize = lables.length; - String labelx = lables[0]; - String labely1 = lables[1]; - - ValueEntry ve = new ValueEntry(labely1, y1); - Point p = new Point(labelx, x, ve); - - for (int j = 2; j < labsize; j++) { - - Double y = new Double(0); - if (values.length > j) - y = values[j]; - - p.getEntries().add(new ValueEntry(lables[j], y)); - } - - singlegraph.add(p); - - } catch (Exception e) { - e.printStackTrace(); - } - } - - public List> getGraph() { - return singlegraph; - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/data/conversions/GraphConverter2D.java b/src/org/gcube/contentmanagement/graphtools/data/conversions/GraphConverter2D.java deleted file mode 100644 index 79a0f8e..0000000 --- a/src/org/gcube/contentmanagement/graphtools/data/conversions/GraphConverter2D.java +++ /dev/null @@ -1,410 +0,0 @@ -package org.gcube.contentmanagement.graphtools.data.conversions; - -import java.util.ArrayList; -import java.util.List; - -import org.gcube.contentmanagement.graphtools.abstracts.SamplesTable; -import org.gcube.contentmanagement.graphtools.utils.MathFunctions; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphData; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; -import org.gcube.portlets.user.timeseries.charts.support.types.Point; -import org.gcube.portlets.user.timeseries.charts.support.types.ValueEntry; - -/* - * Performs Operations on Lists of Points - * Helps in transforming a SamplesTable to a GraphData - */ -public class GraphConverter2D { - - private static Point searchPoint(Point x, List> samples) { - Point novelpoint = null; - for (Point point : samples) { - // if (point.getValue().equals(x.getValue())) { - if (point.getLabel().equals(x.getLabel())) { - 
novelpoint = point; - break; - } - } - - return novelpoint; - } - - // optimizes the dimensions of the sample table, ordering the x and y entries - - @SuppressWarnings({ "rawtypes", "unchecked" }) - public static List> reduceDimension(List> samples) { - ArrayList> novelSamples = new ArrayList>(); - int novelCounter = 0; - try { - for (Point pointsample : samples) { - // search the current point in the new built list - Point novelPoint = searchPoint(pointsample, novelSamples); - - int index = 0; - ValueEntry pointValue = null; - // if it is not the first insertion then find the optimal index for the y value of the current point - // that is: find the column to insert the value - if (novelCounter > 0) { - // find column index - pointValue = pointsample.getEntries().get(0); - List referencevalues = novelSamples.get(0).getEntries(); - int i = 0; - index = referencevalues.size(); - // get the best column - for (Object val : referencevalues) { - if (((ValueEntry) val).getLabel().equals(pointValue.getLabel())) { - index = i; - break; - } - i++; - } - } - // if the point has not been inserted yet (there isn't another point with the same label previously inserted) - if (novelPoint == null) { - // generate a new y - ValueEntry ve = new ValueEntry(pointsample.getEntries().get(0).getLabel(), pointsample.getEntries().get(0).getValue()); - // generate a new (x,y) - novelPoint = new Point(pointsample.getLabel(), pointsample.getValue()); - - // the number of columns to fill with 0s corresponds to all the columns - int numofcolumns = index; - if (novelCounter > 0) - numofcolumns = novelSamples.get(0).getEntries().size(); - - // fill all the columns with 0s - for (int j = 0; j < numofcolumns; j++) { - novelPoint.getEntries().add(j, new ValueEntry(novelSamples.get(0).getEntries().get(j).getLabel(), Double.valueOf(0))); - } - - // add the y at the right column according to the calculated index - if (index >= novelPoint.getEntries().size()) - novelPoint.getEntries().add(index, ve); - 
else - novelPoint.getEntries().set(index, ve); - - // add the new point in the list - novelSamples.add(novelPoint); - - novelCounter++; - } else { - - // if we found a previous element update it - if (index >= novelPoint.getEntries().size()) - // if the index is higher than the y size, add the column at the end - novelPoint.getEntries().add(index, pointValue); - else - // otherwise substitute the current index value - novelPoint.getEntries().set(index, pointValue); - } - } - } catch (Exception e) { - e.printStackTrace(); - } - - return novelSamples; - - } - - // converts a bidimensional sampleTable to a list of bidimensional Points - @SuppressWarnings("unchecked") - public static List> convert(SamplesTable sampleTable) { - ArrayList> pointsList = new ArrayList>(); - try { - // every point has a label and a list of associated y points - int rows = sampleTable.getNumOfDataRows(); - for (int rowIndex = 0; rowIndex < rows; rowIndex++) { - // take the label: it is separated in two parts separated by ';' - String label = sampleTable.getClassification(rowIndex); - int commaIndex = label.indexOf(";"); - String xlabel = label; - String ylabel = ""; - if (commaIndex > 0) { - xlabel = label.substring(0, commaIndex); - ylabel = label.substring(commaIndex + 1); - } - double x = sampleTable.getValue(rowIndex, 0); - double y = sampleTable.getValue(rowIndex, 1); - ValueEntry ve = new ValueEntry(ylabel, y); - - Point p = new Point(xlabel, x, ve); - pointsList.add(p); - } - } catch (Exception e) { - e.printStackTrace(); - } - return pointsList; - } - - @SuppressWarnings("rawtypes") - public static List> deleteHeaders(List> samples) { - - int size = samples.size(); - - for (int i = 0; i < size; i++) { - Point p = samples.get(i); - if (p.getLabel().equals("header")) { - samples.remove(i); - size--; - i--; - } - } - - return samples; - } - - // performs a complete transformation - public static List> transformTable(SamplesTable sampleTable) { - - List> singlegraph = 
convert(sampleTable); - singlegraph = reduceDimension(singlegraph); - singlegraph = deleteHeaders(singlegraph); - - return singlegraph; - } - - @SuppressWarnings({ "rawtypes", "unchecked" }) - public static List> reorder(List> samples) { - - List> orderedsamples = new ArrayList>(); - // check and reorder points - for (Point p : samples) { - int index = 0; - - for (Point ordP : orderedsamples) { - if (ordP.getValue().doubleValue() > p.getValue().doubleValue()) { - break; - } - - index++; - } - - orderedsamples.add(index, p); - } - // re-enumerate x dimension - int i = 0; - for (Point ordP : orderedsamples) { - try { - ordP.setValue(Double.valueOf(i)); - } catch (Exception e) { - } - i++; - } - return orderedsamples; - } - - @SuppressWarnings({ "rawtypes", "unchecked" }) - public static List> filterXRange(List> samples, String minX, String maxX) { - - List> filteredsamples = new ArrayList>(); - boolean copy = false; - for (Point p : samples) { - if (p.getLabel().equals(minX)) { - copy = true; - } - if (copy) { - filteredsamples.add(p); - } - if (p.getLabel().equals(maxX)) { - break; - } - } - return filteredsamples; - - } - - public static final String SPIKE = "STATIONARY"; - - public static void anotateStationaryPoints(GraphGroups gg) { - - for (String key : gg.getGraphs().keySet()) { - GraphData graph = gg.getGraphs().get(key); - - // for each series - int trends = graph.getData().size(); - int yvalues = graph.getData().get(0).getEntries().size(); - // System.out.println("yvalues "+yvalues); - // System.out.println("trends "+trends); - for (int i = 0; i < trends; i++) { - double[] points = MathFunctions.points2Double(graph.getData(), i, yvalues); - double[] derivative = MathFunctions.derivative(points); - boolean[] spikes = MathFunctions.findSpikes(derivative, threshold); - for (int k = 0; k < yvalues; k++) { - if (spikes[k]) { - String label = graph.getData().get(i).getEntries().get(k).getLabel(); - String newLabel = label + ";" + SPIKE; - 
graph.getData().get(i).getEntries().get(k).setLabel(newLabel); - } - } - } - } - // return gg; - } - - private static double threshold = 0.001; - - public static void anotateStationaryPoints(GraphGroups gg, List lables) { - - for (String key : gg.getGraphs().keySet()) { - GraphData graph = gg.getGraphs().get(key); - - // for each series - int trends = graph.getData().size(); - int yvalues = graph.getData().get(0).getEntries().size(); - int spikeslablessize = lables.size(); - // System.out.println("yvalues "+yvalues); - // System.out.println("trends "+trends); - for (int i = 0; i < trends; i++) { - double[] points = MathFunctions.points2Double(graph.getData(), i, yvalues); - double[] derivative = MathFunctions.derivative(points); - boolean[] spikes = MathFunctions.findSpikes(derivative, threshold); - int spikecounter = 0; - for (int k = 0; k < yvalues; k++) { - if (spikes[k]) { - String label = graph.getData().get(i).getEntries().get(k).getLabel(); - String spikelable = SPIKE; - if (spikecounter < spikeslablessize) - spikelable = lables.get(spikecounter); - - String newLabel = label + ";" + spikelable; - graph.getData().get(i).getEntries().get(k).setLabel(newLabel); - spikecounter++; - } - } - } - } - // return gg; - } - - public static void anotatePoints(GraphGroups gg, List pointsIndexes, List lables) { - - for (String key : gg.getGraphs().keySet()) { - GraphData graph = gg.getGraphs().get(key); - - // for each series - int trends = graph.getData().size(); - for (int i = 0; i < trends; i++) { - int progressive = 0; - for (Integer index : pointsIndexes) { - String label = graph.getData().get(i).getEntries().get(index.intValue()).getLabel(); - String addinglabel = lables.get(progressive); - String newLabel = label; - if (addinglabel != null) - newLabel += ";" + addinglabel; - graph.getData().get(i).getEntries().get(index.intValue()).setLabel(newLabel); - progressive++; - } - } - } - // return gg; - } - - // works a single trend in the graph - public static List 
getStationaryPoints(GraphData graph) throws Exception { - List st = new ArrayList(); - Point p = graph.getData().get(0); - st.add(new Point(p.getLabel(), p.getValue())); - int yvalues = graph.getData().get(0).getEntries().size(); - double[] points = MathFunctions.points2Double(graph.getData(), 0, yvalues); - double[] derivative = MathFunctions.derivative(points); - boolean[] spikes = MathFunctions.findSpikes(derivative, threshold); - - for (int k = 0; k < yvalues; k++) { - if (spikes[k]) { - String label = graph.getData().get(0).getEntries().get(k).getLabel(); - Double val = points[k]; - ValueEntry v = new ValueEntry(label, val); - st.get(0).addEntry(v); - } - } - - return st; - } - - // works a single trend in the graph - public static List getLablesFromPoints(Point points) throws Exception { - List lables = new ArrayList(); - - for (Object v : points.getEntries()) { - - lables.add(((ValueEntry) v).getLabel()); - } - - return lables; - } - - // works the first trend in the graph: takes a list of points according to a list of indexes - public static List getLabelsfromIndexes(List> points, List indexes) throws Exception { - List lables = new ArrayList(); - int size = indexes.size(); - - for (Integer index : indexes) { - Object v = points.get(0).getEntries().get(index); - lables.add(((ValueEntry) v).getLabel()); - } - - return lables; - } - - // works the first trend in the graph: takes a list of points according to a list of indexes - public static void sampleAnotationBySameFollower(List> samples) throws Exception { - - String previousLabel = null; - // check and reorder points - for (Point p : samples) { - - for (Object v : p.getEntries()) { - - String label = ((ValueEntry) v).getLabel(); - - int indexcomma = label.indexOf(";"); - if (indexcomma >= 0) { - - String labelcountry = label.substring(indexcomma + 1); - // AnalysisLogger.getLogger().debug("sampleAnotationBySameFollower-> comparing "+labelcountry+" vs "+previousLabel+" ORIGINAL "+label); - if 
((previousLabel != null) && (labelcountry.equals(previousLabel))) { - label = label.substring(0, indexcomma); - // AnalysisLogger.getLogger().debug("sampleAnotationBySameFollower-> ELIMINATING LABEL!!!"); - ((ValueEntry) v).setLabel(label); - } else - previousLabel = labelcountry; - } - } - - } - } - - // works the first trend in the graph: takes a list of points according to a list of indexes - public static void sampleAnotationByRange(List> samples, int range) throws Exception { - - if (range > 0) { - // check and reorder points - for (Point p : samples) { - int partialCounter = 0; - for (Object v : p.getEntries()) { - - String label = ((ValueEntry) v).getLabel(); - - int indexcomma = label.indexOf(";"); - if (indexcomma >= 0) { - //if not enough time has passed delete the label otherwise reset counter - if (partialCounter <= range) { - String labelcountry = label.substring(indexcomma + 1); -// AnalysisLogger.getLogger().debug("sampleAnotationByRange-> partial counter "+partialCounter+ " label "+ label); - label = label.substring(0, indexcomma); -// AnalysisLogger.getLogger().debug("sampleAnotationByRange-> ELIMINATING LABEL!!!"); - ((ValueEntry) v).setLabel(label); - } - else{ - partialCounter = 0; - } - } - - partialCounter++; - - } - - } - } - } -} diff --git a/src/org/gcube/contentmanagement/graphtools/data/conversions/ImageTools.java b/src/org/gcube/contentmanagement/graphtools/data/conversions/ImageTools.java deleted file mode 100644 index 592157e..0000000 --- a/src/org/gcube/contentmanagement/graphtools/data/conversions/ImageTools.java +++ /dev/null @@ -1,79 +0,0 @@ -package org.gcube.contentmanagement.graphtools.data.conversions; - -import java.awt.Graphics; -import java.awt.GraphicsConfiguration; -import java.awt.GraphicsDevice; -import java.awt.GraphicsEnvironment; -import java.awt.HeadlessException; -import java.awt.Image; -import java.awt.Toolkit; -import java.awt.Transparency; -import java.awt.image.BufferedImage; -import java.awt.image.PixelGrabber; 
/**
 * Conversion helpers between AWT {@link Image} and {@link BufferedImage},
 * plus an alpha-channel probe.
 */
public class ImageTools {

	/** Wraps a BufferedImage into a toolkit Image (no pixel copy). */
	public static Image toImage(BufferedImage bufferedImage) {
		return Toolkit.getDefaultToolkit().createImage(bufferedImage.getSource());
	}

	/**
	 * Converts a generic Image into a BufferedImage. Returns the argument
	 * unchanged when it already is one; otherwise draws it into a new image
	 * (screen-compatible when a display exists, default RGB/ARGB otherwise).
	 */
	public static BufferedImage toBufferedImage(Image image) {
		if (image instanceof BufferedImage) {
			return (BufferedImage) image;
		}

		// This code ensures that all the pixels in the image are loaded
		image = new ImageIcon(image).getImage();

		// Determine if the image has transparent pixels
		boolean hasAlpha = hasAlpha(image);

		BufferedImage bimage = null;
		GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
		try {
			// Use BITMASK transparency when the source has transparent pixels
			int transparency = hasAlpha ? Transparency.BITMASK : Transparency.OPAQUE;

			// Create a buffered image compatible with the screen
			GraphicsDevice gs = ge.getDefaultScreenDevice();
			GraphicsConfiguration gc = gs.getDefaultConfiguration();
			bimage = gc.createCompatibleImage(image.getWidth(null), image.getHeight(null), transparency);
		} catch (HeadlessException ignored) {
			// No screen available: fall through to the default color model below
		}

		if (bimage == null) {
			// Create a buffered image using the default color model
			int type = hasAlpha ? BufferedImage.TYPE_INT_ARGB : BufferedImage.TYPE_INT_RGB;
			bimage = new BufferedImage(image.getWidth(null), image.getHeight(null), type);
		}

		// Paint the source onto the buffered image
		Graphics g = bimage.createGraphics();
		g.drawImage(image, 0, 0, null);
		g.dispose();

		return bimage;
	}

	/**
	 * @return true when the image's color model has an alpha channel. For
	 *         non-buffered images a single pixel is grabbed to obtain the model.
	 */
	public static boolean hasAlpha(Image image) {
		// If buffered image, the color model is readily available
		if (image instanceof BufferedImage) {
			return ((BufferedImage) image).getColorModel().hasAlpha();
		}

		// Grabbing a single pixel is usually sufficient to get the color model
		PixelGrabber pg = new PixelGrabber(image, 0, 0, 1, 1, false);
		try {
			pg.grabPixels();
		} catch (InterruptedException e) {
			// FIX: restore the interrupt status instead of silently swallowing
			// it, so callers on interrupted threads can still observe the flag
			Thread.currentThread().interrupt();
		}

		return pg.getColorModel().hasAlpha();
	}

}
generateSelectionString(String... columns) { - - String delimiter = " , "; - StringBuffer sb = new StringBuffer(); - int numbOfColumns = columns.length; - - for (int i = 0; i < numbOfColumns; i++) { - - String column = columns[i]; - sb.append(column); - if (i < numbOfColumns - 1) - sb.append(delimiter); - } - - return sb.toString(); - } - - // SELECT field1,field5,field6,field3 FROM ts_161efa00_2c32_11df_b8b3_aa10916debe6 t where field3='Brown seaweeds'; - private static final String staticQuery = "select distinct %1$s from %2$s where (%3$s) "; - - private static final String staticOrderBy = " order by %1$s;"; - - private static final String descriptionQuery = "SELECT ordinal_position,column_name,data_type FROM information_schema.COLUMNS WHERE table_name ='%1$s'"; - - // produces a bi-dimensional table, where for a single x, multiple y are allowed - - // transforms db column types to java types - private Map getTypes(SessionFactory dbSession, String table) { - - HashMap typesMap = new HashMap(); - String queryDesc = String.format(descriptionQuery, table); - AnalysisLogger.getLogger().trace("Query for Description: " + queryDesc); - List resultSet = DatabaseFactory.executeSQLQuery(queryDesc, dbSession); - - for (Object result : resultSet) { - Object[] resultArray = (Object[]) result; - - String column_name = (String) resultArray[1]; - String data_type = (String) resultArray[2]; - typesMap.put(column_name, DataTypeRecognizer.transformTypeFromDB(data_type)); - - } - - return typesMap; - } - - private void updateSequence(SamplesTable sequence, Map columnTypes, String xDimension, Object[] row, int index) { - - // set correct x label and value - String xLabel = ""; - Double xValue = Double.valueOf(0); - - // String Type = columnTypes.get(xDimension); - - // if it is a string set the label as the entry and the value as the index - // NOTE OLD CODE: the x axis is always meant to be in linear scale, for now - /* - * if (Type.equals(BigDecimal.class.getName())) { xLabel = "" 
+ row[0]; xValue = Double.valueOf(index); } else { - * - * xLabel = xDimension; xValue = Double.valueOf("" + row[0]); } - */ - xLabel = "" + row[0]; - xValue = Double.valueOf(index); - - String label = xLabel + ";"; - - // record the y value by taking the row 3 as label and row 2 as the value - String yLabel = ""; - Double yValue = Double.valueOf(0); - try { - yLabel += (String) row[3]; - yValue = Double.valueOf("" + row[2]); - } catch (Exception e) { - } - label += yLabel; - sequence.addSampleRow(label, xValue, yValue); - } - - // makes a query on the db and produces a HashMap of bi-dimensional tables including more parallel graphs - // each group represents a graph - // each graph has a xDimension and a label for each x (taken from x value) - // each x can have more than one y and label - // yValue is the column with y numeric values - // speciesColumn is the resulting column containing the labels for the ys - public Map getMultiDimTemporalTables(List filters, Filter YRangeFilter, String table, String xDimension, String groupDimension, String yValue, String speciesColumn, String... 
yFilters) { - - LinkedHashMap temporalSequence = new LinkedHashMap(); - - String selection = generateSelectionString(xDimension, groupDimension, yValue, speciesColumn); - - StringBuffer whereclause = new StringBuffer(); - int i = 0; - for (String columnFilter : yFilters) { - whereclause.append(speciesColumn + "='" + columnFilter + "'"); - if (i < yFilters.length - 1) - whereclause.append(" or "); - - i++; - } - - String query = String.format(staticQuery, selection, table, whereclause); - - if ((filters != null) && (filters.size() > 0)) { - query+="and ("; - int kk =0; - for (Filter f : filters) { - -// query += f.toString(); - if (kk==0){ - query += f.toString(""); - } - else - query += f.toString("or"); - kk++; - } - query+=")"; - } - - if (YRangeFilter != null) { - query += "and " + yValue + ">" + YRangeFilter.getFirstNumber() + " and " + yValue + "<" + YRangeFilter.getSecondNumber() + " "; - } - - query += String.format(staticOrderBy, xDimension); - - AnalysisLogger.getLogger().trace("Query: " + query); - List resultSet = DatabaseFactory.executeSQLQuery(query, dbSession); - // xdim, group, quantity, yLabel - - Map columnTypes = getTypes(dbSession, table); - if (resultSet != null) { - // for each result row - for (Object result : resultSet) { - // take the single row - Object[] resultArray = (Object[]) result; - // for each temporal dimension, generate a table - String temporalInfo = (String) resultArray[1]; - // take the table for the temporal sequence - SamplesTable sequence = temporalSequence.get(temporalInfo); - // if table does not exist create a new table and add it to the sequence - if (sequence == null) { - sequence = new BigSamplesTable(); - temporalSequence.put(temporalInfo, sequence); - // set fake elements as headers in order to understand the labels - for (String columnFilter : yFilters) { - sequence.addSampleRow("header;" + columnFilter, Double.NEGATIVE_INFINITY, 0); - } - } - // update the rows of the bidimensional table. 
Use as index the following: take the length of the sequence of values, then subtract the number of headers - updateSequence(sequence, columnTypes, xDimension, resultArray, sequence.getNumOfDataRows() - yFilters.length); - } - } - - return temporalSequence; - } - - public Map getMultiDimTemporalTables(String table, String xDimension, String groupDimension, String yValue, String speciesColumn, String... yFilters) { - return getMultiDimTemporalTables(null, null, table, xDimension, groupDimension, yValue, speciesColumn, yFilters); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/ExampleExternalConfig.java b/src/org/gcube/contentmanagement/graphtools/examples/ExampleExternalConfig.java deleted file mode 100644 index 131a4e8..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/ExampleExternalConfig.java +++ /dev/null @@ -1,35 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; - -public class ExampleExternalConfig { - - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - //database Parameters - conf.setDatabaseUserName("root"); -// conf.setDatabasePassword("password"); - conf.setDatabaseDriver("com.mysql.jdbc.Driver"); - conf.setDatabaseURL("jdbc:mysql://localhost/timeseries"); - conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect"); - conf.setDatabaseAutomaticTestTable("connectiontesttable"); - conf.setDatabaseIdleConnectionTestPeriod("3600"); - - 
stg.init("./cfg/",conf); - - stg.generateGraphs(3, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/ExampleExternalConfigPostGress.java b/src/org/gcube/contentmanagement/graphtools/examples/ExampleExternalConfigPostGress.java deleted file mode 100644 index 6ee7150..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/ExampleExternalConfigPostGress.java +++ /dev/null @@ -1,35 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; - -public class ExampleExternalConfigPostGress { - - - public static void main(String[] args) throws Exception{ - String table = "ts_7ab1d700_18d9_11e0_b703_c9d7e969ced7"; - String xDimension = "field3"; // the dates - String yDimension = "field5"; // values on Y - String groupDimension = "field2"; // group names - String speciesColumn = "field4"; // lines labels - String filter1 = "Perciformes"; - String filter2 = "Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - //database Parameters - conf.setDatabaseUserName("lucio"); - conf.setDatabasePassword("d4science"); - conf.setDatabaseDriver("org.postgresql.Driver"); - conf.setDatabaseURL("jdbc:postgresql://dlib29.isti.cnr.it/timeseries"); - conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect"); - conf.setDatabaseAutomaticTestTable("connectiontesttable"); - conf.setDatabaseIdleConnectionTestPeriod("3600"); - - stg.init("./cfg/",conf); - - stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/ExampleExternalConfigPostGressProd.java 
b/src/org/gcube/contentmanagement/graphtools/examples/ExampleExternalConfigPostGressProd.java deleted file mode 100644 index 374bbc9..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/ExampleExternalConfigPostGressProd.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class ExampleExternalConfigPostGressProd { - - - public static void main(String[] args) throws Exception{ - String table = "ts_c4bdfaa0_6c16_11e0_bb1f_fb760af5afc7"; - String xDimension = "field4"; // the dates - String yDimension = "field6"; // values on Y - String groupDimension = "field1"; // group names - String speciesColumn = "field2"; // lines labels - - String filter1 = "ABW"; -// String filter2 = "Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - AnalysisLogger.setLogger("./cfg/ALog.properties"); - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - //database Parameters - conf.setDatabaseUserName("gcube1"); - conf.setDatabasePassword("d4science"); - conf.setDatabaseDriver("org.postgresql.Driver"); - conf.setDatabaseURL("jdbc:postgresql://node28.p.d4science.research-infrastructures.eu/timeseries"); - -// conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect"); - conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect"); - conf.setDatabaseAutomaticTestTable("connectiontesttable"); - conf.setDatabaseIdleConnectionTestPeriod("3600"); - - stg.init("./cfg/",conf); - -// stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2); - stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn,filter1); - - System.out.println(); - } - -} diff --git 
a/src/org/gcube/contentmanagement/graphtools/examples/ExampleFilters.java b/src/org/gcube/contentmanagement/graphtools/examples/ExampleFilters.java deleted file mode 100644 index 93db9ae..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/ExampleFilters.java +++ /dev/null @@ -1,51 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.HistogramGraph; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -public class ExampleFilters { - - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - //database Parameters - conf.setDatabaseUserName("root"); -// conf.setDatabasePassword("password"); - conf.setDatabaseDriver("com.mysql.jdbc.Driver"); - conf.setDatabaseURL("jdbc:mysql://localhost/timeseries"); - conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect"); - conf.setDatabaseAutomaticTestTable("connectiontesttable"); - conf.setDatabaseIdleConnectionTestPeriod("3600"); - - stg.init("./cfg/",conf); - -// stg.addColumnFilter("field4", "F"); - -// stg.addColumnFilter("field5", "2005","<"); - -// stg.addXRangeFilter("2004", "2005"); - -// stg.addYRangeFilter("100", "300"); - - stg.addColumnFilter("field1", "Africa - Inland waters"); - stg.addColumnFilter("field1", "Atlantic, Northeast"); - - GraphGroups gg = stg.generateGraphs(3, table, xDimension, yDimension, groupDimension, speciesColumn, 
filter1, filter2); - - HistogramGraph series = new HistogramGraph(""); - series.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/ExampleFilters2.java b/src/org/gcube/contentmanagement/graphtools/examples/ExampleFilters2.java deleted file mode 100644 index 7b86399..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/ExampleFilters2.java +++ /dev/null @@ -1,52 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.HistogramGraph; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -public class ExampleFilters2 { - - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - //database Parameters - conf.setDatabaseUserName("root"); -// conf.setDatabasePassword("password"); - conf.setDatabaseDriver("com.mysql.jdbc.Driver"); - conf.setDatabaseURL("jdbc:mysql://localhost/timeseries"); - conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect"); - conf.setDatabaseAutomaticTestTable("connectiontesttable"); - conf.setDatabaseIdleConnectionTestPeriod("3600"); - - stg.init("./cfg/",conf); - -// stg.addColumnFilter("field4", "F"); - -// stg.addColumnFilter("field5", "2005","<"); - - //stg.addXRangeFilter("2004", "2005"); - - stg.addYRangeFilter("5600000", "7000000"); - - /* - stg.addColumnFilter("field1", "Africa - Inland waters"); - 
stg.addColumnFilter("field1", "Atlantic, Northeast"); - */ - GraphGroups gg = stg.generateGraphs(3, table, xDimension, yDimension, groupDimension, speciesColumn, filter1); - - HistogramGraph series = new HistogramGraph(""); - series.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/ExampleMeanVariance.java b/src/org/gcube/contentmanagement/graphtools/examples/ExampleMeanVariance.java deleted file mode 100644 index d8de623..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/ExampleMeanVariance.java +++ /dev/null @@ -1,45 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples; - -import java.util.List; -import java.util.Map; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.GaussianDistributionGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; -import org.jfree.data.function.NormalDistributionFunction2D; - -public class ExampleMeanVariance { - - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; -// String filter2 = "Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - stg.init("./cfg/"); - - GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2); - - Map> normalsmap = GaussianDistributionGraph.graphs2Normals(gg); - - //show normals - for (String key:normalsmap.keySet()){ - List normals = normalsmap.get(key); - System.out.println("Means and Variances for distribution named: "+key); - for (NormalDistributionFunction2D gaussian:normals){ - System.out.print("("+gaussian.getMean()+" ; "+gaussian.getStandardDeviation()+") "); - } 
- System.out.println(); - } - -// AnalysisLogger.getLogger().debug("Generated! "+normalsmap); - - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/ExamplePostGressLocalRadar.java b/src/org/gcube/contentmanagement/graphtools/examples/ExamplePostGressLocalRadar.java deleted file mode 100644 index 4bec670..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/ExamplePostGressLocalRadar.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.RadarGraph; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -public class ExamplePostGressLocalRadar { - - - public static void main(String[] args) throws Exception{ - - AnalysisLogger.setLogger("./cfg/ALog.properties"); - - String table = "ts_3bdaf790_edbe_11e0_93e3_f6a9821baa29"; - String xDimension = "field2"; // the dates - String yDimension = "field4"; // values on Y - String groupDimension = "field0"; // group names - String speciesColumn = "field3"; // lines labels - String filter2 = "Perciformes"; - String filter1 = "Boregadus"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - //database Parameters - conf.setDatabaseUserName("gcube"); - conf.setDatabasePassword("d4science2"); - conf.setDatabaseDriver("org.postgresql.Driver"); - conf.setDatabaseURL("jdbc:postgresql://dbtest.next.research-infrastructures.eu/timeseries"); - conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect"); - - stg.init("./cfg/",conf); - - GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1); - - 
RadarGraph radar = new RadarGraph(""); - radar.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/ExampleStaticConfig.java b/src/org/gcube/contentmanagement/graphtools/examples/ExampleStaticConfig.java deleted file mode 100644 index 9604b28..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/ExampleStaticConfig.java +++ /dev/null @@ -1,29 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.RadarGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -public class ExampleStaticConfig { - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; - StatisticsGenerator stg = new StatisticsGenerator(); - - stg.init("./cfg/"); - - - GraphGroups gg = stg.generateGraphs(3, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2); - - RadarGraph radar = new RadarGraph(""); - radar.renderGraphGroup(gg); - } - - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/ExampleStringGraphData.java b/src/org/gcube/contentmanagement/graphtools/examples/ExampleStringGraphData.java deleted file mode 100644 index bd6ff43..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/ExampleStringGraphData.java +++ /dev/null @@ -1,50 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.HistogramGraph; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import 
org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -import com.thoughtworks.xstream.XStream; -import com.thoughtworks.xstream.io.xml.DomDriver; - -public class ExampleStringGraphData { - - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - //database Parameters - conf.setDatabaseUserName("root"); -// conf.setDatabasePassword("password"); - conf.setDatabaseDriver("com.mysql.jdbc.Driver"); - conf.setDatabaseURL("jdbc:mysql://localhost/timeseries"); - conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect"); - conf.setDatabaseAutomaticTestTable("connectiontesttable"); - conf.setDatabaseIdleConnectionTestPeriod("3600"); - - stg.init("./cfg/",conf); - - - //String generation - String ggs = stg.generateStringGraphs(100, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2); - System.out.println(ggs); - //String rebuilding - XStream xStream = new XStream(new DomDriver()); - GraphGroups gg = (GraphGroups) xStream.fromXML(ggs); - - //graph plot - HistogramGraph series = new HistogramGraph(""); - series.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleDataBuiltGraph.java b/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleDataBuiltGraph.java deleted file mode 100644 index 83a6119..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleDataBuiltGraph.java +++ /dev/null @@ -1,54 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples.graphsTypes; - -import java.util.ArrayList; - -import 
org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.data.GraphSamplesTable; -import org.gcube.contentmanagement.graphtools.data.conversions.GraphConverter2D; -import org.gcube.contentmanagement.graphtools.plotting.graphs.TransectLineGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphData; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - - - -public class ExampleDataBuiltGraph { - - - public static void main(String[] args) throws Exception{ - - - - - ArrayList l = new ArrayList(); - ArrayList v = new ArrayList(); - ArrayList stationaryLables = new ArrayList(); - - for (int i=0;i<10;i++){ - double d= 10*Math.random(); - v.add(d); - l.add("value "+i); - if (d>5){ - stationaryLables.add("statpoint"+i); - } - } - GraphSamplesTable gts = new GraphSamplesTable("Random Series",l,v,true); - GraphData grd = new GraphData(gts.getGraph(), false); - GraphGroups graphgroups = new GraphGroups(); - graphgroups.addGraph("Distribution", grd); - - - GraphConverter2D.anotateStationaryPoints(graphgroups,stationaryLables); - - GraphConverter2D.getStationaryPoints(grd); - - TransectLineGraph series = new TransectLineGraph(""); - series.renderGraphGroup(graphgroups); - - -// series.renderGraphGroup(gg); - - } - - - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleDerivative.java b/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleDerivative.java deleted file mode 100644 index 084c11e..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleDerivative.java +++ /dev/null @@ -1,65 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples.graphsTypes; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.data.conversions.GraphConverter2D; -import org.gcube.contentmanagement.graphtools.plotting.graphs.TransectLineGraph; 
-import org.gcube.contentmanagement.graphtools.utils.MathFunctions; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphData; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - - - -public class ExampleDerivative { - - - public static void main(String[] args) throws Exception{ - - String table = "ts_a904da30_b4fc_11df_800d_bcef80d51986"; - String xDimension = "field1"; - String yDimension = "field4"; - String groupDimension = "field2"; - String speciesColumn = "field3"; - String filter1 = "Toluene"; -// String filter2 = "Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - stg.init("./cfg/"); - - GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, speciesColumn, speciesColumn, filter1); - - /* - for (String key:gg.getGraphs().keySet()){ - GraphData graph = gg.getGraphs().get(key); - - //for each series - int trends = graph.getData().size(); - int yvalues = graph.getData().get(0).getEntries().size(); - System.out.println("yvalues "+yvalues); - System.out.println("trends "+trends); - for (int i=0;i image = series.renderGraphGroupImage(width,height,gg); - - Image singleimage = image.get(1); - - - BufferedImage bimage = ImageTools.toBufferedImage(singleimage); - - XStream xStream = new XStream(new DomDriver()); - - String xmlimage = xStream.toXML(singleimage); - - System.out.println(xmlimage); - - File outputfile = new File("saved.png"); - - ImageIO.write(bimage, "png", outputfile); - */ - - }catch(Exception e){ - - e.printStackTrace(); - - } - - - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleLines.java b/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleLines.java deleted file mode 100644 index b50ba7f..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleLines.java +++ /dev/null @@ -1,33 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples.graphsTypes; - -import 
org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.LineGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -public class ExampleLines { - - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; -// String filter2 = "Osteichthyes"; - - - - - StatisticsGenerator stg = new StatisticsGenerator(); - - stg.init("./cfg/"); - - GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2); - - LineGraph series = new LineGraph(""); - series.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleNormalDistribution.java b/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleNormalDistribution.java deleted file mode 100644 index b14455a..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleNormalDistribution.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples.graphsTypes; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.GaussianDistributionGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -public class ExampleNormalDistribution { - - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter2 = "Brown seaweeds"; - String filter1 = "River eels"; -// String filter2 = 
"Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - - - stg.init("./cfg/"); - - GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1); - - GaussianDistributionGraph pie = new GaussianDistributionGraph(""); - pie.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleNumericSeries.java b/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleNumericSeries.java deleted file mode 100644 index 420db2d..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleNumericSeries.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples.graphsTypes; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.NumericSeriesGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -public class ExampleNumericSeries { - - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; -// String filter2 = "Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - - - stg.init("./cfg/"); - - GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2); - - NumericSeriesGraph series = new NumericSeriesGraph(""); - series.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExamplePie.java b/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExamplePie.java deleted file mode 100644 index 142430b..0000000 --- 
a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExamplePie.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples.graphsTypes; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.PieGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -public class ExamplePie { - - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; -// String filter2 = "Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - - - stg.init("./cfg/"); - - GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2); - - PieGraph pie = new PieGraph(""); - pie.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleRadar.java b/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleRadar.java deleted file mode 100644 index b67dbf7..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleRadar.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples.graphsTypes; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.RadarGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -public class ExampleRadar { - - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String 
speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; -// String filter2 = "Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - - - stg.init("./cfg/"); - - GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter2); - - RadarGraph radar = new RadarGraph(""); - radar.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleScatter.java b/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleScatter.java deleted file mode 100644 index 5f98357..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleScatter.java +++ /dev/null @@ -1,30 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples.graphsTypes; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.ScatterGraphNumeric; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -public class ExampleScatter { - - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; -// String filter2 = "Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - stg.init("./cfg/"); - - GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1); - -// ScatterGraphGeneric series = new ScatterGraphGeneric(""); - ScatterGraphNumeric series = new ScatterGraphNumeric(""); - series.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleTimeSeries.java 
b/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleTimeSeries.java deleted file mode 100644 index 034ce69..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleTimeSeries.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples.graphsTypes; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.TimeSeriesGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -public class ExampleTimeSeries { - - - public static void main(String[] args) throws Exception{ - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; -// String filter2 = "Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - - - stg.init("./cfg/"); - - GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1); - - TimeSeriesGraph series = new TimeSeriesGraph(""); - series.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleTimeSeries2.java b/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleTimeSeries2.java deleted file mode 100644 index 307ff2c..0000000 --- a/src/org/gcube/contentmanagement/graphtools/examples/graphsTypes/ExampleTimeSeries2.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.gcube.contentmanagement.graphtools.examples.graphsTypes; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.TimeSeriesGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -public class ExampleTimeSeries2 { - - - public static void 
main(String[] args) throws Exception{ - String table = "ts_a904da30_b4fc_11df_800d_bcef80d51986"; - String xDimension = "field1"; - String yDimension = "field4"; - String groupDimension = "field2"; - String speciesColumn = "field3"; - String filter1 = "Toluene"; - String filter2 = "River eels"; -// String filter2 = "Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - - - stg.init("./cfg/"); - - GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1); - - TimeSeriesGraph series = new TimeSeriesGraph(""); - series.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/demo/PieChartDemo1.java b/src/org/gcube/contentmanagement/graphtools/plotting/demo/PieChartDemo1.java deleted file mode 100644 index e7132b8..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/demo/PieChartDemo1.java +++ /dev/null @@ -1,143 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * =========================================================== - * - * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. - * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------ - * PieGraph.java - * ------------------ - * (C) Copyright 2003-2007, by Object Refinery Limited and Contributors. - * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): ; - * - * Changes - * ------- - * 09-Mar-2005 : Version 1, copied from the demo collection that ships with - * the JFreeChart Developer Guide (DG); - * - */ - -package org.gcube.contentmanagement.graphtools.plotting.demo; - -import javax.swing.JPanel; - -import org.jfree.chart.ChartFactory; -import org.jfree.chart.ChartPanel; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.plot.PiePlot; -import org.jfree.data.general.DefaultPieDataset; -import org.jfree.data.general.PieDataset; -import org.jfree.ui.ApplicationFrame; -import org.jfree.ui.RefineryUtilities; - -/** - * A simple demonstration application showing how to create a pie chart using - * data from a {@link DefaultPieDataset}. - */ -public class PieChartDemo1 extends ApplicationFrame { - - /** - * Default constructor. - * - * @param title the frame title. - */ - public PieChartDemo1(String title) { - super(title); - setContentPane(createDemoPanel()); - } - - /** - * Creates a sample dataset. - * - * @return A sample dataset. 
- */ - private static PieDataset createDataset() { - DefaultPieDataset dataset = new DefaultPieDataset(); - dataset.setValue("One", new Double(43.2)); - dataset.setValue("Two", new Double(10.0)); - dataset.setValue("Three", new Double(27.5)); - dataset.setValue("Four", new Double(17.5)); - dataset.setValue("Five", new Double(11.0)); - dataset.setValue("Six", new Double(19.4)); - return dataset; - } - - /** - * Creates a chart. - * - * @param dataset the dataset. - * - * @return A chart. - */ - private static JFreeChart createChart(PieDataset dataset) { - - JFreeChart chart = ChartFactory.createPieChart( - "Pie Chart Demo 1", // chart title - dataset, // data - true, // include legend - true, - false - ); - - PiePlot plot = (PiePlot) chart.getPlot(); - plot.setSectionOutlinesVisible(false); - plot.setNoDataMessage("No data available"); - - return chart; - - } - - /** - * Creates a panel for the demo (used by SuperDemo.java). - * - * @return A panel. - */ - public static JPanel createDemoPanel() { - JFreeChart chart = createChart(createDataset()); - return new ChartPanel(chart); - } - - /** - * Starting point for the demonstration application. - * - * @param args ignored. 
- */ - public static void main(String[] args) { - - // ****************************************************************** - // More than 150 demo applications are included with the JFreeChart - // Developer Guide...for more information, see: - // - // > http://www.object-refinery.com/jfreechart/guide.html - // - // ****************************************************************** - - PieChartDemo1 demo = new PieChartDemo1("Pie Chart Demo 1"); - demo.pack(); - RefineryUtilities.centerFrameOnScreen(demo); - demo.setVisible(true); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/demo/SpiderWebDemo1.java b/src/org/gcube/contentmanagement/graphtools/plotting/demo/SpiderWebDemo1.java deleted file mode 100644 index d613427..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/demo/SpiderWebDemo1.java +++ /dev/null @@ -1,143 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * =========================================================== - * - * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. 
- * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------ - * PieGraph.java - * ------------------ - * (C) Copyright 2003-2007, by Object Refinery Limited and Contributors. - * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): ; - * - * Changes - * ------- - * 09-Mar-2005 : Version 1, copied from the demo collection that ships with - * the JFreeChart Developer Guide (DG); - * - */ - -package org.gcube.contentmanagement.graphtools.plotting.demo; - -import java.awt.Graphics2D; -import java.awt.geom.Rectangle2D; -import java.awt.image.BufferedImage; - -import javax.swing.JPanel; - -import org.jfree.chart.ChartPanel; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.plot.SpiderWebPlot; -import org.jfree.data.category.DefaultCategoryDataset; -import org.jfree.data.general.Dataset; -import org.jfree.data.general.DefaultPieDataset; -import org.jfree.ui.ApplicationFrame; -import org.jfree.ui.RefineryUtilities; - -/** - * A simple demonstration application showing how to create a pie chart using - * data from a {@link DefaultPieDataset}. - */ -public class SpiderWebDemo1 extends ApplicationFrame { - - /** - * Default constructor. - * - * @param title the frame title. - */ - public SpiderWebDemo1(String title) { - super(title); - setContentPane(createDemoPanel()); - } - - /** - * Creates a sample dataset. - * - * @return A sample dataset. 
- */ - private static Dataset createDataset() { - DefaultCategoryDataset dataset = new DefaultCategoryDataset(); - dataset.addValue(35.0, "S1", "C1"); - dataset.addValue(45.0, "S1", "C2"); - dataset.addValue(55.0, "S1", "C3"); - dataset.addValue(15.0, "S1", "C4"); - dataset.addValue(25.0, "S1", "C5"); - dataset.addValue(39.0, "S2", "C1"); - dataset.addValue(20.0, "S2", "C2"); - dataset.addValue(34.0, "S2", "C3"); - dataset.addValue(30.0, "S2", "C4"); - dataset.addValue(13.0, "S2", "C5"); - return dataset; - } - - /** - * Creates a chart. - * - * @param dataset the dataset. - * - * @return A chart. - */ - private static JFreeChart createChart(Dataset dataset) { - - SpiderWebPlot plot = new SpiderWebPlot((DefaultCategoryDataset)dataset); - JFreeChart chart = new JFreeChart(plot); - return chart; - - } - - /** - * Creates a panel for the demo (used by SuperDemo.java). - * - * @return A panel. - */ - public static JPanel createDemoPanel() { - JFreeChart chart = createChart(createDataset()); - JPanel jp = new ChartPanel(chart); - return jp; - } - - /** - * Starting point for the demonstration application. - * - * @param args ignored. 
- */ - public static void main(String[] args) { - - // ****************************************************************** - // More than 150 demo applications are included with the JFreeChart - // Developer Guide...for more information, see: - // - // > http://www.object-refinery.com/jfreechart/guide.html - // - // ****************************************************************** - - SpiderWebDemo1 demo = new SpiderWebDemo1("Web Chart Demo 1"); - demo.pack(); - RefineryUtilities.centerFrameOnScreen(demo); - - demo.setVisible(true); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/demo/SpiderWebPlot.java b/src/org/gcube/contentmanagement/graphtools/plotting/demo/SpiderWebPlot.java deleted file mode 100644 index b6207fe..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/demo/SpiderWebPlot.java +++ /dev/null @@ -1,1668 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * =========================================================== - * - * (C) Copyright 2000-2008, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. 
- * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------ - * SpiderWebPlot.java - * ------------------ - * (C) Copyright 2005-2008, by Heaps of Flavour Pty Ltd and Contributors. - * - * Company Info: http://www.i4-talent.com - * - * Original Author: Don Elliott; - * Contributor(s): David Gilbert (for Object Refinery Limited); - * Nina Jeliazkova; - * - * Changes - * ------- - * 28-Jan-2005 : First cut - missing a few features - still to do: - * - needs tooltips/URL/label generator functions - * - ticks on axes / background grid? - * 31-Jan-2005 : Renamed SpiderWebPlot, added label generator support, and - * reformatted for consistency with other source files in - * JFreeChart (DG); - * 20-Apr-2005 : Renamed CategoryLabelGenerator - * --> CategoryItemLabelGenerator (DG); - * 05-May-2005 : Updated draw() method parameters (DG); - * 10-Jun-2005 : Added equals() method and fixed serialization (DG); - * 16-Jun-2005 : Added default constructor and get/setDataset() - * methods (DG); - * ------------- JFREECHART 1.0.x --------------------------------------------- - * 05-Apr-2006 : Fixed bug preventing the display of zero values - see patch - * 1462727 (DG); - * 05-Apr-2006 : Added support for mouse clicks, tool tips and URLs - see patch - * 1463455 (DG); - * 01-Jun-2006 : Fix bug 1493199, NullPointerException when drawing with null - * info (DG); - * 05-Feb-2007 : Added attributes for axis stroke and paint, while fixing - * bug 1651277, and implemented clone() properly (DG); - * 06-Feb-2007 : Changed getPlotValue() to protected, as suggested in bug - * 1605202 (DG); - * 05-Mar-2007 : Restore clip region correctly (see bug 1667750) (DG); - * 18-May-2007 : Set dataset for LegendItem (DG); - * 02-Jun-2008 : Fixed bug with chart entities using TableOrder.BY_COLUMN (DG); - * 02-Jun-2008 : Fixed bug with null dataset (DG); - * - */ - -package 
org.gcube.contentmanagement.graphtools.plotting.demo; - -import java.awt.AlphaComposite; -import java.awt.BasicStroke; -import java.awt.Color; -import java.awt.Composite; -import java.awt.Font; -import java.awt.Graphics2D; -import java.awt.Paint; -import java.awt.Polygon; -import java.awt.Rectangle; -import java.awt.Shape; -import java.awt.Stroke; -import java.awt.font.FontRenderContext; -import java.awt.font.LineMetrics; -import java.awt.geom.Arc2D; -import java.awt.geom.Ellipse2D; -import java.awt.geom.Line2D; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.Serializable; -import java.util.Iterator; -import java.util.List; - -import org.jfree.chart.LegendItem; -import org.jfree.chart.LegendItemCollection; -import org.jfree.chart.entity.CategoryItemEntity; -import org.jfree.chart.entity.EntityCollection; -import org.jfree.chart.event.PlotChangeEvent; -import org.jfree.chart.labels.CategoryItemLabelGenerator; -import org.jfree.chart.labels.CategoryToolTipGenerator; -import org.jfree.chart.labels.StandardCategoryItemLabelGenerator; -import org.jfree.chart.plot.DrawingSupplier; -import org.jfree.chart.plot.Plot; -import org.jfree.chart.plot.PlotRenderingInfo; -import org.jfree.chart.plot.PlotState; -import org.jfree.chart.urls.CategoryURLGenerator; -import org.jfree.data.category.CategoryDataset; -import org.jfree.data.general.DatasetChangeEvent; -import org.jfree.data.general.DatasetUtilities; -import org.jfree.io.SerialUtilities; -import org.jfree.ui.RectangleInsets; -import org.jfree.util.ObjectUtilities; -import org.jfree.util.PaintList; -import org.jfree.util.PaintUtilities; -import org.jfree.util.Rotation; -import org.jfree.util.ShapeUtilities; -import org.jfree.util.StrokeList; -import org.jfree.util.TableOrder; - -/** - * A plot that displays data from a {@link CategoryDataset} in the form of a - * "spider web". 
Multiple series can be plotted on the same axis to allow - * easy comparison. This plot doesn't support negative values at present. - */ -public class SpiderWebPlot extends Plot implements Cloneable, Serializable { - - /** For serialization. */ - private static final long serialVersionUID = -5376340422031599463L; - - /** The default head radius percent (currently 1%). */ - public static final double DEFAULT_HEAD = 0.01; - - /** The default axis label gap (currently 10%). */ - public static final double DEFAULT_AXIS_LABEL_GAP = 0.10; - - /** The default interior gap. */ - public static final double DEFAULT_INTERIOR_GAP = 0.25; - - /** The maximum interior gap (currently 40%). */ - public static final double MAX_INTERIOR_GAP = 0.40; - - /** The default starting angle for the radar chart axes. */ - public static final double DEFAULT_START_ANGLE = 90.0; - - /** The default series label font. */ - public static final Font DEFAULT_LABEL_FONT = new Font("SansSerif", - Font.PLAIN, 10); - - /** The default series label paint. */ - public static final Paint DEFAULT_LABEL_PAINT = Color.black; - - /** The default series label background paint. */ - public static final Paint DEFAULT_LABEL_BACKGROUND_PAINT - = new Color(255, 255, 192); - - /** The default series label outline paint. */ - public static final Paint DEFAULT_LABEL_OUTLINE_PAINT = Color.black; - - /** The default series label outline stroke. */ - public static final Stroke DEFAULT_LABEL_OUTLINE_STROKE - = new BasicStroke(0.5f); - - /** The default series label shadow paint. */ - public static final Paint DEFAULT_LABEL_SHADOW_PAINT = Color.lightGray; - - /** - * The default maximum value plotted - forces the plot to evaluate - * the maximum from the data passed in - */ - public static final double DEFAULT_MAX_VALUE = -1.0; - - /** The head radius as a percentage of the available drawing area. */ - protected double headPercent; - - /** The space left around the outside of the plot as a percentage. 
*/ - private double interiorGap; - - /** The gap between the labels and the axes as a %age of the radius. */ - private double axisLabelGap; - - /** - * The paint used to draw the axis lines. - * - * @since 1.0.4 - */ - private transient Paint axisLinePaint; - - /** - * The stroke used to draw the axis lines. - * - * @since 1.0.4 - */ - private transient Stroke axisLineStroke; - - /** The dataset. */ - private CategoryDataset dataset; - - /** The maximum value we are plotting against on each category axis */ - private double maxValue; - - /** - * The data extract order (BY_ROW or BY_COLUMN). This denotes whether - * the data series are stored in rows (in which case the category names are - * derived from the column keys) or in columns (in which case the category - * names are derived from the row keys). - */ - private TableOrder dataExtractOrder; - - /** The starting angle. */ - private double startAngle; - - /** The direction for drawing the radar axis & plots. */ - private Rotation direction; - - /** The legend item shape. */ - private transient Shape legendItemShape; - - /** The paint for ALL series (overrides list). */ - private transient Paint seriesPaint; - - /** The series paint list. */ - private PaintList seriesPaintList; - - /** The base series paint (fallback). */ - private transient Paint baseSeriesPaint; - - /** The outline paint for ALL series (overrides list). */ - private transient Paint seriesOutlinePaint; - - /** The series outline paint list. */ - private PaintList seriesOutlinePaintList; - - /** The base series outline paint (fallback). */ - private transient Paint baseSeriesOutlinePaint; - - /** The outline stroke for ALL series (overrides list). */ - private transient Stroke seriesOutlineStroke; - - /** The series outline stroke list. */ - private StrokeList seriesOutlineStrokeList; - - /** The base series outline stroke (fallback). */ - private transient Stroke baseSeriesOutlineStroke; - - /** The font used to display the category labels. 
*/ - private Font labelFont; - - /** The color used to draw the category labels. */ - private transient Paint labelPaint; - - /** The label generator. */ - private CategoryItemLabelGenerator labelGenerator; - - /** controls if the web polygons are filled or not */ - private boolean webFilled = true; - - /** A tooltip generator for the plot (null permitted). */ - private CategoryToolTipGenerator toolTipGenerator; - - /** A URL generator for the plot (null permitted). */ - private CategoryURLGenerator urlGenerator; - - /** - * Creates a default plot with no dataset. - */ - public SpiderWebPlot() { - this(null); - } - - /** - * Creates a new spider web plot with the given dataset, with each row - * representing a series. - * - * @param dataset the dataset (null permitted). - */ - public SpiderWebPlot(CategoryDataset dataset) { - this(dataset, TableOrder.BY_ROW); - } - - /** - * Creates a new spider web plot with the given dataset. - * - * @param dataset the dataset. - * @param extract controls how data is extracted ({@link TableOrder#BY_ROW} - * or {@link TableOrder#BY_COLUMN}). 
- */ - public SpiderWebPlot(CategoryDataset dataset, TableOrder extract) { - super(); - if (extract == null) { - throw new IllegalArgumentException("Null 'extract' argument."); - } - this.dataset = dataset; - if (dataset != null) { - dataset.addChangeListener(this); - } - - this.dataExtractOrder = extract; - this.headPercent = DEFAULT_HEAD; - this.axisLabelGap = DEFAULT_AXIS_LABEL_GAP; - this.axisLinePaint = Color.black; - this.axisLineStroke = new BasicStroke(1.0f); - - this.interiorGap = DEFAULT_INTERIOR_GAP; - this.startAngle = DEFAULT_START_ANGLE; - this.direction = Rotation.CLOCKWISE; - this.maxValue = DEFAULT_MAX_VALUE; - - this.seriesPaint = null; - this.seriesPaintList = new PaintList(); - this.baseSeriesPaint = null; - - this.seriesOutlinePaint = null; - this.seriesOutlinePaintList = new PaintList(); - this.baseSeriesOutlinePaint = DEFAULT_OUTLINE_PAINT; - - this.seriesOutlineStroke = null; - this.seriesOutlineStrokeList = new StrokeList(); - this.baseSeriesOutlineStroke = DEFAULT_OUTLINE_STROKE; - - this.labelFont = DEFAULT_LABEL_FONT; - this.labelPaint = DEFAULT_LABEL_PAINT; - this.labelGenerator = new StandardCategoryItemLabelGenerator(); - - this.legendItemShape = DEFAULT_LEGEND_ITEM_CIRCLE; - } - - /** - * Returns a short string describing the type of plot. - * - * @return The plot type. - */ - public String getPlotType() { - // return localizationResources.getString("Radar_Plot"); - return ("Spider Web Plot"); - } - - /** - * Returns the dataset. - * - * @return The dataset (possibly null). - * - * @see #setDataset(CategoryDataset) - */ - public CategoryDataset getDataset() { - return this.dataset; - } - - /** - * Sets the dataset used by the plot and sends a {@link PlotChangeEvent} - * to all registered listeners. - * - * @param dataset the dataset (null permitted). - * - * @see #getDataset() - */ - public void setDataset(CategoryDataset dataset) { - // if there is an existing dataset, remove the plot from the list of - // change listeners... 
- if (this.dataset != null) { - this.dataset.removeChangeListener(this); - } - - // set the new dataset, and register the chart as a change listener... - this.dataset = dataset; - if (dataset != null) { - setDatasetGroup(dataset.getGroup()); - dataset.addChangeListener(this); - } - - // send a dataset change event to self to trigger plot change event - datasetChanged(new DatasetChangeEvent(this, dataset)); - } - - /** - * Method to determine if the web chart is to be filled. - * - * @return A boolean. - * - * @see #setWebFilled(boolean) - */ - public boolean isWebFilled() { - return this.webFilled; - } - - /** - * Sets the webFilled flag and sends a {@link PlotChangeEvent} to all - * registered listeners. - * - * @param flag the flag. - * - * @see #isWebFilled() - */ - public void setWebFilled(boolean flag) { - this.webFilled = flag; - fireChangeEvent(); - } - - /** - * Returns the data extract order (by row or by column). - * - * @return The data extract order (never null). - * - * @see #setDataExtractOrder(TableOrder) - */ - public TableOrder getDataExtractOrder() { - return this.dataExtractOrder; - } - - /** - * Sets the data extract order (by row or by column) and sends a - * {@link PlotChangeEvent}to all registered listeners. - * - * @param order the order (null not permitted). - * - * @throws IllegalArgumentException if order is - * null. - * - * @see #getDataExtractOrder() - */ - public void setDataExtractOrder(TableOrder order) { - if (order == null) { - throw new IllegalArgumentException("Null 'order' argument"); - } - this.dataExtractOrder = order; - fireChangeEvent(); - } - - /** - * Returns the head percent. - * - * @return The head percent. - * - * @see #setHeadPercent(double) - */ - public double getHeadPercent() { - return this.headPercent; - } - - /** - * Sets the head percent and sends a {@link PlotChangeEvent} to all - * registered listeners. - * - * @param percent the percent. 
- * - * @see #getHeadPercent() - */ - public void setHeadPercent(double percent) { - this.headPercent = percent; - fireChangeEvent(); - } - - /** - * Returns the start angle for the first radar axis. - *
- * This is measured in degrees starting from 3 o'clock (Java Arc2D default) - * and measuring anti-clockwise. - * - * @return The start angle. - * - * @see #setStartAngle(double) - */ - public double getStartAngle() { - return this.startAngle; - } - - /** - * Sets the starting angle and sends a {@link PlotChangeEvent} to all - * registered listeners. - *

- * The initial default value is 90 degrees, which corresponds to 12 o'clock. - * A value of zero corresponds to 3 o'clock... this is the encoding used by - * Java's Arc2D class. - * - * @param angle the angle (in degrees). - * - * @see #getStartAngle() - */ - public void setStartAngle(double angle) { - this.startAngle = angle; - fireChangeEvent(); - } - - /** - * Returns the maximum value any category axis can take. - * - * @return The maximum value. - * - * @see #setMaxValue(double) - */ - public double getMaxValue() { - return this.maxValue; - } - - /** - * Sets the maximum value any category axis can take and sends - * a {@link PlotChangeEvent} to all registered listeners. - * - * @param value the maximum value. - * - * @see #getMaxValue() - */ - public void setMaxValue(double value) { - this.maxValue = value; - fireChangeEvent(); - } - - /** - * Returns the direction in which the radar axes are drawn - * (clockwise or anti-clockwise). - * - * @return The direction (never null). - * - * @see #setDirection(Rotation) - */ - public Rotation getDirection() { - return this.direction; - } - - /** - * Sets the direction in which the radar axes are drawn and sends a - * {@link PlotChangeEvent} to all registered listeners. - * - * @param direction the direction (null not permitted). - * - * @see #getDirection() - */ - public void setDirection(Rotation direction) { - if (direction == null) { - throw new IllegalArgumentException("Null 'direction' argument."); - } - this.direction = direction; - fireChangeEvent(); - } - - /** - * Returns the interior gap, measured as a percentage of the available - * drawing space. - * - * @return The gap (as a percentage of the available drawing space). - * - * @see #setInteriorGap(double) - */ - public double getInteriorGap() { - return this.interiorGap; - } - - /** - * Sets the interior gap and sends a {@link PlotChangeEvent} to all - * registered listeners. 
This controls the space between the edges of the - * plot and the plot area itself (the region where the axis labels appear). - * - * @param percent the gap (as a percentage of the available drawing space). - * - * @see #getInteriorGap() - */ - public void setInteriorGap(double percent) { - if ((percent < 0.0) || (percent > MAX_INTERIOR_GAP)) { - throw new IllegalArgumentException( - "Percentage outside valid range."); - } - if (this.interiorGap != percent) { - this.interiorGap = percent; - fireChangeEvent(); - } - } - - /** - * Returns the axis label gap. - * - * @return The axis label gap. - * - * @see #setAxisLabelGap(double) - */ - public double getAxisLabelGap() { - return this.axisLabelGap; - } - - /** - * Sets the axis label gap and sends a {@link PlotChangeEvent} to all - * registered listeners. - * - * @param gap the gap. - * - * @see #getAxisLabelGap() - */ - public void setAxisLabelGap(double gap) { - this.axisLabelGap = gap; - fireChangeEvent(); - } - - /** - * Returns the paint used to draw the axis lines. - * - * @return The paint used to draw the axis lines (never null). - * - * @see #setAxisLinePaint(Paint) - * @see #getAxisLineStroke() - * @since 1.0.4 - */ - public Paint getAxisLinePaint() { - return this.axisLinePaint; - } - - /** - * Sets the paint used to draw the axis lines and sends a - * {@link PlotChangeEvent} to all registered listeners. - * - * @param paint the paint (null not permitted). - * - * @see #getAxisLinePaint() - * @since 1.0.4 - */ - public void setAxisLinePaint(Paint paint) { - if (paint == null) { - throw new IllegalArgumentException("Null 'paint' argument."); - } - this.axisLinePaint = paint; - fireChangeEvent(); - } - - /** - * Returns the stroke used to draw the axis lines. - * - * @return The stroke used to draw the axis lines (never null). 
- * - * @see #setAxisLineStroke(Stroke) - * @see #getAxisLinePaint() - * @since 1.0.4 - */ - public Stroke getAxisLineStroke() { - return this.axisLineStroke; - } - - /** - * Sets the stroke used to draw the axis lines and sends a - * {@link PlotChangeEvent} to all registered listeners. - * - * @param stroke the stroke (null not permitted). - * - * @see #getAxisLineStroke() - * @since 1.0.4 - */ - public void setAxisLineStroke(Stroke stroke) { - if (stroke == null) { - throw new IllegalArgumentException("Null 'stroke' argument."); - } - this.axisLineStroke = stroke; - fireChangeEvent(); - } - - //// SERIES PAINT ///////////////////////// - - /** - * Returns the paint for ALL series in the plot. - * - * @return The paint (possibly null). - * - * @see #setSeriesPaint(Paint) - */ - public Paint getSeriesPaint() { - return this.seriesPaint; - } - - /** - * Sets the paint for ALL series in the plot. If this is set to null - * , then a list of paints is used instead (to allow different colors - * to be used for each series of the radar group). - * - * @param paint the paint (null permitted). - * - * @see #getSeriesPaint() - */ - public void setSeriesPaint(Paint paint) { - this.seriesPaint = paint; - fireChangeEvent(); - } - - /** - * Returns the paint for the specified series. - * - * @param series the series index (zero-based). - * - * @return The paint (never null). - * - * @see #setSeriesPaint(int, Paint) - */ - public Paint getSeriesPaint(int series) { - - // return the override, if there is one... 
- if (this.seriesPaint != null) { - return this.seriesPaint; - } - - // otherwise look up the paint list - Paint result = this.seriesPaintList.getPaint(series); - if (result == null) { - DrawingSupplier supplier = getDrawingSupplier(); - if (supplier != null) { - Paint p = supplier.getNextPaint(); - this.seriesPaintList.setPaint(series, p); - result = p; - } - else { - result = this.baseSeriesPaint; - } - } - return result; - - } - - /** - * Sets the paint used to fill a series of the radar and sends a - * {@link PlotChangeEvent} to all registered listeners. - * - * @param series the series index (zero-based). - * @param paint the paint (null permitted). - * - * @see #getSeriesPaint(int) - */ - public void setSeriesPaint(int series, Paint paint) { - this.seriesPaintList.setPaint(series, paint); - fireChangeEvent(); - } - - /** - * Returns the base series paint. This is used when no other paint is - * available. - * - * @return The paint (never null). - * - * @see #setBaseSeriesPaint(Paint) - */ - public Paint getBaseSeriesPaint() { - return this.baseSeriesPaint; - } - - /** - * Sets the base series paint. - * - * @param paint the paint (null not permitted). - * - * @see #getBaseSeriesPaint() - */ - public void setBaseSeriesPaint(Paint paint) { - if (paint == null) { - throw new IllegalArgumentException("Null 'paint' argument."); - } - this.baseSeriesPaint = paint; - fireChangeEvent(); - } - - //// SERIES OUTLINE PAINT //////////////////////////// - - /** - * Returns the outline paint for ALL series in the plot. - * - * @return The paint (possibly null). - */ - public Paint getSeriesOutlinePaint() { - return this.seriesOutlinePaint; - } - - /** - * Sets the outline paint for ALL series in the plot. If this is set to - * null, then a list of paints is used instead (to allow - * different colors to be used for each series). - * - * @param paint the paint (null permitted). 
- */ - public void setSeriesOutlinePaint(Paint paint) { - this.seriesOutlinePaint = paint; - fireChangeEvent(); - } - - /** - * Returns the paint for the specified series. - * - * @param series the series index (zero-based). - * - * @return The paint (never null). - */ - public Paint getSeriesOutlinePaint(int series) { - // return the override, if there is one... - if (this.seriesOutlinePaint != null) { - return this.seriesOutlinePaint; - } - // otherwise look up the paint list - Paint result = this.seriesOutlinePaintList.getPaint(series); - if (result == null) { - result = this.baseSeriesOutlinePaint; - } - return result; - } - - /** - * Sets the paint used to fill a series of the radar and sends a - * {@link PlotChangeEvent} to all registered listeners. - * - * @param series the series index (zero-based). - * @param paint the paint (null permitted). - */ - public void setSeriesOutlinePaint(int series, Paint paint) { - this.seriesOutlinePaintList.setPaint(series, paint); - fireChangeEvent(); - } - - /** - * Returns the base series paint. This is used when no other paint is - * available. - * - * @return The paint (never null). - */ - public Paint getBaseSeriesOutlinePaint() { - return this.baseSeriesOutlinePaint; - } - - /** - * Sets the base series paint. - * - * @param paint the paint (null not permitted). - */ - public void setBaseSeriesOutlinePaint(Paint paint) { - if (paint == null) { - throw new IllegalArgumentException("Null 'paint' argument."); - } - this.baseSeriesOutlinePaint = paint; - fireChangeEvent(); - } - - //// SERIES OUTLINE STROKE ///////////////////// - - /** - * Returns the outline stroke for ALL series in the plot. - * - * @return The stroke (possibly null). - */ - public Stroke getSeriesOutlineStroke() { - return this.seriesOutlineStroke; - } - - /** - * Sets the outline stroke for ALL series in the plot. If this is set to - * null, then a list of paints is used instead (to allow - * different colors to be used for each series). 
- * - * @param stroke the stroke (null permitted). - */ - public void setSeriesOutlineStroke(Stroke stroke) { - this.seriesOutlineStroke = stroke; - fireChangeEvent(); - } - - /** - * Returns the stroke for the specified series. - * - * @param series the series index (zero-based). - * - * @return The stroke (never null). - */ - public Stroke getSeriesOutlineStroke(int series) { - - // return the override, if there is one... - if (this.seriesOutlineStroke != null) { - return this.seriesOutlineStroke; - } - - // otherwise look up the paint list - Stroke result = this.seriesOutlineStrokeList.getStroke(series); - if (result == null) { - result = this.baseSeriesOutlineStroke; - } - return result; - - } - - /** - * Sets the stroke used to fill a series of the radar and sends a - * {@link PlotChangeEvent} to all registered listeners. - * - * @param series the series index (zero-based). - * @param stroke the stroke (null permitted). - */ - public void setSeriesOutlineStroke(int series, Stroke stroke) { - this.seriesOutlineStrokeList.setStroke(series, stroke); - fireChangeEvent(); - } - - /** - * Returns the base series stroke. This is used when no other stroke is - * available. - * - * @return The stroke (never null). - */ - public Stroke getBaseSeriesOutlineStroke() { - return this.baseSeriesOutlineStroke; - } - - /** - * Sets the base series stroke. - * - * @param stroke the stroke (null not permitted). - */ - public void setBaseSeriesOutlineStroke(Stroke stroke) { - if (stroke == null) { - throw new IllegalArgumentException("Null 'stroke' argument."); - } - this.baseSeriesOutlineStroke = stroke; - fireChangeEvent(); - } - - /** - * Returns the shape used for legend items. - * - * @return The shape (never null). - * - * @see #setLegendItemShape(Shape) - */ - public Shape getLegendItemShape() { - return this.legendItemShape; - } - - /** - * Sets the shape used for legend items and sends a {@link PlotChangeEvent} - * to all registered listeners. 
- * - * @param shape the shape (null not permitted). - * - * @see #getLegendItemShape() - */ - public void setLegendItemShape(Shape shape) { - if (shape == null) { - throw new IllegalArgumentException("Null 'shape' argument."); - } - this.legendItemShape = shape; - fireChangeEvent(); - } - - /** - * Returns the series label font. - * - * @return The font (never null). - * - * @see #setLabelFont(Font) - */ - public Font getLabelFont() { - return this.labelFont; - } - - /** - * Sets the series label font and sends a {@link PlotChangeEvent} to all - * registered listeners. - * - * @param font the font (null not permitted). - * - * @see #getLabelFont() - */ - public void setLabelFont(Font font) { - if (font == null) { - throw new IllegalArgumentException("Null 'font' argument."); - } - this.labelFont = font; - fireChangeEvent(); - } - - /** - * Returns the series label paint. - * - * @return The paint (never null). - * - * @see #setLabelPaint(Paint) - */ - public Paint getLabelPaint() { - return this.labelPaint; - } - - /** - * Sets the series label paint and sends a {@link PlotChangeEvent} to all - * registered listeners. - * - * @param paint the paint (null not permitted). - * - * @see #getLabelPaint() - */ - public void setLabelPaint(Paint paint) { - if (paint == null) { - throw new IllegalArgumentException("Null 'paint' argument."); - } - this.labelPaint = paint; - fireChangeEvent(); - } - - /** - * Returns the label generator. - * - * @return The label generator (never null). - * - * @see #setLabelGenerator(CategoryItemLabelGenerator) - */ - public CategoryItemLabelGenerator getLabelGenerator() { - return this.labelGenerator; - } - - /** - * Sets the label generator and sends a {@link PlotChangeEvent} to all - * registered listeners. - * - * @param generator the generator (null not permitted). 
- * - * @see #getLabelGenerator() - */ - public void setLabelGenerator(CategoryItemLabelGenerator generator) { - if (generator == null) { - throw new IllegalArgumentException("Null 'generator' argument."); - } - this.labelGenerator = generator; - } - - /** - * Returns the tool tip generator for the plot. - * - * @return The tool tip generator (possibly null). - * - * @see #setToolTipGenerator(CategoryToolTipGenerator) - * - * @since 1.0.2 - */ - public CategoryToolTipGenerator getToolTipGenerator() { - return this.toolTipGenerator; - } - - /** - * Sets the tool tip generator for the plot and sends a - * {@link PlotChangeEvent} to all registered listeners. - * - * @param generator the generator (null permitted). - * - * @see #getToolTipGenerator() - * - * @since 1.0.2 - */ - public void setToolTipGenerator(CategoryToolTipGenerator generator) { - this.toolTipGenerator = generator; - fireChangeEvent(); - } - - /** - * Returns the URL generator for the plot. - * - * @return The URL generator (possibly null). - * - * @see #setURLGenerator(CategoryURLGenerator) - * - * @since 1.0.2 - */ - public CategoryURLGenerator getURLGenerator() { - return this.urlGenerator; - } - - /** - * Sets the URL generator for the plot and sends a - * {@link PlotChangeEvent} to all registered listeners. - * - * @param generator the generator (null permitted). - * - * @see #getURLGenerator() - * - * @since 1.0.2 - */ - public void setURLGenerator(CategoryURLGenerator generator) { - this.urlGenerator = generator; - fireChangeEvent(); - } - - /** - * Returns a collection of legend items for the radar chart. - * - * @return The legend items. 
- */ - public LegendItemCollection getLegendItems() { - LegendItemCollection result = new LegendItemCollection(); - if (getDataset() == null) { - return result; - } - - List keys = null; - if (this.dataExtractOrder == TableOrder.BY_ROW) { - keys = this.dataset.getRowKeys(); - } - else if (this.dataExtractOrder == TableOrder.BY_COLUMN) { - keys = this.dataset.getColumnKeys(); - } - - if (keys != null) { - int series = 0; - Iterator iterator = keys.iterator(); - Shape shape = getLegendItemShape(); - - while (iterator.hasNext()) { - String label = iterator.next().toString(); - String description = label; - - Paint paint = getSeriesPaint(series); - Paint outlinePaint = getSeriesOutlinePaint(series); - Stroke stroke = getSeriesOutlineStroke(series); - LegendItem item = new LegendItem(label, description, - null, null, shape, paint, stroke, outlinePaint); - item.setDataset(getDataset()); - result.add(item); - series++; - } - } - - return result; - } - - /** - * Returns a cartesian point from a polar angle, length and bounding box - * - * @param bounds the area inside which the point needs to be. - * @param angle the polar angle, in degrees. - * @param length the relative length. Given in percent of maximum extend. - * - * @return The cartesian point. - */ - protected Point2D getWebPoint(Rectangle2D bounds, - double angle, double length) { - - double angrad = Math.toRadians(angle); - double x = Math.cos(angrad) * length * bounds.getWidth() / 2; - double y = -Math.sin(angrad) * length * bounds.getHeight() / 2; - - return new Point2D.Double(bounds.getX() + x + bounds.getWidth() / 2, - bounds.getY() + y + bounds.getHeight() / 2); - } - - /** - * Draws the plot on a Java 2D graphics device (such as the screen or a - * printer). - * - * @param g2 the graphics device. - * @param area the area within which the plot should be drawn. - * @param anchor the anchor point (null permitted). - * @param parentState the state from the parent plot, if there is one. 
- * @param info collects info about the drawing. - */ - public void draw(Graphics2D g2, Rectangle2D area, Point2D anchor, - PlotState parentState, PlotRenderingInfo info) { - - // adjust for insets... - RectangleInsets insets = getInsets(); - insets.trim(area); - - if (info != null) { - info.setPlotArea(area); - info.setDataArea(area); - } - - drawBackground(g2, area); - drawOutline(g2, area); - - Shape savedClip = g2.getClip(); - - g2.clip(area); - Composite originalComposite = g2.getComposite(); - g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, - getForegroundAlpha())); - - if (!DatasetUtilities.isEmptyOrNull(this.dataset)) { - int seriesCount = 0, catCount = 0; - - if (this.dataExtractOrder == TableOrder.BY_ROW) { - seriesCount = this.dataset.getRowCount(); - catCount = this.dataset.getColumnCount(); - } - else { - seriesCount = this.dataset.getColumnCount(); - catCount = this.dataset.getRowCount(); - } - - // ensure we have a maximum value to use on the axes - if (this.maxValue == DEFAULT_MAX_VALUE) - calculateMaxValue(seriesCount, catCount); - - // Next, setup the plot area - - // adjust the plot area by the interior spacing value - - double gapHorizontal = area.getWidth() * getInteriorGap(); - double gapVertical = area.getHeight() * getInteriorGap(); - - double X = area.getX() + gapHorizontal / 2; - double Y = area.getY() + gapVertical / 2; - double W = area.getWidth() - gapHorizontal; - double H = area.getHeight() - gapVertical; - - double headW = area.getWidth() * this.headPercent; - double headH = area.getHeight() * this.headPercent; - - // make the chart area a square - double min = Math.min(W, H) / 2; - X = (X + X + W) / 2 - min; - Y = (Y + Y + H) / 2 - min; - W = 2 * min; - H = 2 * min; - - Point2D centre = new Point2D.Double(X + W / 2, Y + H / 2); - Rectangle2D radarArea = new Rectangle2D.Double(X, Y, W, H); - - // draw the axis and category label - for (int cat = 0; cat < catCount; cat++) { - double angle = getStartAngle() - + 
(getDirection().getFactor() * cat * 360 / catCount); - - Point2D endPoint = getWebPoint(radarArea, angle, 1); - // 1 = end of axis - Line2D line = new Line2D.Double(centre, endPoint); - g2.setPaint(this.axisLinePaint); - g2.setStroke(this.axisLineStroke); - g2.draw(line); - drawLabel(g2, radarArea, 0.0, cat, angle, 360.0 / catCount); - } - - // Now actually plot each of the series polygons.. - for (int series = 0; series < seriesCount; series++) { - drawRadarPoly(g2, radarArea, centre, info, series, catCount, - headH, headW); - } - } - else { - drawNoDataMessage(g2, area); - } - g2.setClip(savedClip); - g2.setComposite(originalComposite); - drawOutline(g2, area); - } - - /** - * loop through each of the series to get the maximum value - * on each category axis - * - * @param seriesCount the number of series - * @param catCount the number of categories - */ - private void calculateMaxValue(int seriesCount, int catCount) { - double v = 0; - Number nV = null; - - for (int seriesIndex = 0; seriesIndex < seriesCount; seriesIndex++) { - for (int catIndex = 0; catIndex < catCount; catIndex++) { - nV = getPlotValue(seriesIndex, catIndex); - if (nV != null) { - v = nV.doubleValue(); - if (v > this.maxValue) { - this.maxValue = v; - } - } - } - } - } - - /** - * Draws a radar plot polygon. - * - * @param g2 the graphics device. - * @param plotArea the area we are plotting in (already adjusted). - * @param centre the centre point of the radar axes - * @param info chart rendering info. 
- * @param series the series within the dataset we are plotting - * @param catCount the number of categories per radar plot - * @param headH the data point height - * @param headW the data point width - */ - protected void drawRadarPoly(Graphics2D g2, - Rectangle2D plotArea, - Point2D centre, - PlotRenderingInfo info, - int series, int catCount, - double headH, double headW) { - - Polygon polygon = new Polygon(); - - EntityCollection entities = null; - if (info != null) { - entities = info.getOwner().getEntityCollection(); - } - - // plot the data... - for (int cat = 0; cat < catCount; cat++) { - - Number dataValue = getPlotValue(series, cat); - - if (dataValue != null) { - double value = dataValue.doubleValue(); - - if (value >= 0) { // draw the polygon series... - - // Finds our starting angle from the centre for this axis - - double angle = getStartAngle() - + (getDirection().getFactor() * cat * 360 / catCount); - - // The following angle calc will ensure there isn't a top - // vertical axis - this may be useful if you don't want any - // given criteria to 'appear' move important than the - // others.. - // + (getDirection().getFactor() - // * (cat + 0.5) * 360 / catCount); - - // find the point at the appropriate distance end point - // along the axis/angle identified above and add it to the - // polygon - - Point2D point = getWebPoint(plotArea, angle, - value / this.maxValue); - polygon.addPoint((int) point.getX(), (int) point.getY()); - - // put an elipse at the point being plotted.. 
- - Paint paint = getSeriesPaint(series); - Paint outlinePaint = getSeriesOutlinePaint(series); - Stroke outlineStroke = getSeriesOutlineStroke(series); - - Ellipse2D head = new Ellipse2D.Double(point.getX() - - headW / 2, point.getY() - headH / 2, headW, - headH); - g2.setPaint(paint); - g2.fill(head); - g2.setStroke(outlineStroke); - g2.setPaint(outlinePaint); - g2.draw(head); - - if (entities != null) { - int row = 0; int col = 0; - if (this.dataExtractOrder == TableOrder.BY_ROW) { - row = series; - col = cat; - } - else { - row = cat; - col = series; - } - String tip = null; - if (this.toolTipGenerator != null) { - tip = this.toolTipGenerator.generateToolTip( - this.dataset, row, col); - } - - String url = null; - if (this.urlGenerator != null) { - url = this.urlGenerator.generateURL(this.dataset, - row, col); - } - - Shape area = new Rectangle( - (int) (point.getX() - headW), - (int) (point.getY() - headH), - (int) (headW * 2), (int) (headH * 2)); - CategoryItemEntity entity = new CategoryItemEntity( - area, tip, url, this.dataset, - this.dataset.getRowKey(row), - this.dataset.getColumnKey(col)); - entities.add(entity); - } - - } - } - } - // Plot the polygon - - Paint paint = getSeriesPaint(series); - g2.setPaint(paint); - g2.setStroke(getSeriesOutlineStroke(series)); - g2.draw(polygon); - - // Lastly, fill the web polygon if this is required - - if (this.webFilled) { - g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, - 0.1f)); - g2.fill(polygon); - g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, - getForegroundAlpha())); - } - } - - /** - * Returns the value to be plotted at the interseries of the - * series and the category. This allows us to plot - * BY_ROW or BY_COLUMN which basically is just - * reversing the definition of the categories and data series being - * plotted. - * - * @param series the series to be plotted. - * @param cat the category within the series to be plotted. 
- * - * @return The value to be plotted (possibly null). - * - * @see #getDataExtractOrder() - */ - protected Number getPlotValue(int series, int cat) { - Number value = null; - if (this.dataExtractOrder == TableOrder.BY_ROW) { - value = this.dataset.getValue(series, cat); - } - else if (this.dataExtractOrder == TableOrder.BY_COLUMN) { - value = this.dataset.getValue(cat, series); - } - return value; - } - - /** - * Draws the label for one axis. - * - * @param g2 the graphics device. - * @param plotArea the plot area - * @param value the value of the label (ignored). - * @param cat the category (zero-based index). - * @param startAngle the starting angle. - * @param extent the extent of the arc. - */ - protected void drawLabel(Graphics2D g2, Rectangle2D plotArea, double value, - int cat, double startAngle, double extent) { - FontRenderContext frc = g2.getFontRenderContext(); - - String label = null; - if (this.dataExtractOrder == TableOrder.BY_ROW) { - // if series are in rows, then the categories are the column keys - label = this.labelGenerator.generateColumnLabel(this.dataset, cat); - } - else { - // if series are in columns, then the categories are the row keys - label = this.labelGenerator.generateRowLabel(this.dataset, cat); - } - - Rectangle2D labelBounds = getLabelFont().getStringBounds(label, frc); - LineMetrics lm = getLabelFont().getLineMetrics(label, frc); - double ascent = lm.getAscent(); - - Point2D labelLocation = calculateLabelLocation(labelBounds, ascent, - plotArea, startAngle); - - Composite saveComposite = g2.getComposite(); - - g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, - 1.0f)); - g2.setPaint(getLabelPaint()); - g2.setFont(getLabelFont()); - g2.drawString(label, (float) labelLocation.getX(), - (float) labelLocation.getY()); - g2.setComposite(saveComposite); - } - - /** - * Returns the location for a label - * - * @param labelBounds the label bounds. - * @param ascent the ascent (height of font). 
- * @param plotArea the plot area - * @param startAngle the start angle for the pie series. - * - * @return The location for a label. - */ - protected Point2D calculateLabelLocation(Rectangle2D labelBounds, - double ascent, - Rectangle2D plotArea, - double startAngle) - { - Arc2D arc1 = new Arc2D.Double(plotArea, startAngle, 0, Arc2D.OPEN); - Point2D point1 = arc1.getEndPoint(); - - double deltaX = -(point1.getX() - plotArea.getCenterX()) - * this.axisLabelGap; - double deltaY = -(point1.getY() - plotArea.getCenterY()) - * this.axisLabelGap; - - double labelX = point1.getX() - deltaX; - double labelY = point1.getY() - deltaY; - - if (labelX < plotArea.getCenterX()) { - labelX -= labelBounds.getWidth(); - } - - if (labelX == plotArea.getCenterX()) { - labelX -= labelBounds.getWidth() / 2; - } - - if (labelY > plotArea.getCenterY()) { - labelY += ascent; - } - - return new Point2D.Double(labelX, labelY); - } - - /** - * Tests this plot for equality with an arbitrary object. - * - * @param obj the object (null permitted). - * - * @return A boolean. 
- */ - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof SpiderWebPlot)) { - return false; - } - if (!super.equals(obj)) { - return false; - } - SpiderWebPlot that = (SpiderWebPlot) obj; - if (!this.dataExtractOrder.equals(that.dataExtractOrder)) { - return false; - } - if (this.headPercent != that.headPercent) { - return false; - } - if (this.interiorGap != that.interiorGap) { - return false; - } - if (this.startAngle != that.startAngle) { - return false; - } - if (!this.direction.equals(that.direction)) { - return false; - } - if (this.maxValue != that.maxValue) { - return false; - } - if (this.webFilled != that.webFilled) { - return false; - } - if (this.axisLabelGap != that.axisLabelGap) { - return false; - } - if (!PaintUtilities.equal(this.axisLinePaint, that.axisLinePaint)) { - return false; - } - if (!this.axisLineStroke.equals(that.axisLineStroke)) { - return false; - } - if (!ShapeUtilities.equal(this.legendItemShape, that.legendItemShape)) { - return false; - } - if (!PaintUtilities.equal(this.seriesPaint, that.seriesPaint)) { - return false; - } - if (!this.seriesPaintList.equals(that.seriesPaintList)) { - return false; - } - if (!PaintUtilities.equal(this.baseSeriesPaint, that.baseSeriesPaint)) { - return false; - } - if (!PaintUtilities.equal(this.seriesOutlinePaint, - that.seriesOutlinePaint)) { - return false; - } - if (!this.seriesOutlinePaintList.equals(that.seriesOutlinePaintList)) { - return false; - } - if (!PaintUtilities.equal(this.baseSeriesOutlinePaint, - that.baseSeriesOutlinePaint)) { - return false; - } - if (!ObjectUtilities.equal(this.seriesOutlineStroke, - that.seriesOutlineStroke)) { - return false; - } - if (!this.seriesOutlineStrokeList.equals( - that.seriesOutlineStrokeList)) { - return false; - } - if (!this.baseSeriesOutlineStroke.equals( - that.baseSeriesOutlineStroke)) { - return false; - } - if (!this.labelFont.equals(that.labelFont)) { - return false; - } - if 
(!PaintUtilities.equal(this.labelPaint, that.labelPaint)) { - return false; - } - if (!this.labelGenerator.equals(that.labelGenerator)) { - return false; - } - if (!ObjectUtilities.equal(this.toolTipGenerator, - that.toolTipGenerator)) { - return false; - } - if (!ObjectUtilities.equal(this.urlGenerator, - that.urlGenerator)) { - return false; - } - return true; - } - - /** - * Returns a clone of this plot. - * - * @return A clone of this plot. - * - * @throws CloneNotSupportedException if the plot cannot be cloned for - * any reason. - */ - public Object clone() throws CloneNotSupportedException { - SpiderWebPlot clone = (SpiderWebPlot) super.clone(); - clone.legendItemShape = ShapeUtilities.clone(this.legendItemShape); - clone.seriesPaintList = (PaintList) this.seriesPaintList.clone(); - clone.seriesOutlinePaintList - = (PaintList) this.seriesOutlinePaintList.clone(); - clone.seriesOutlineStrokeList - = (StrokeList) this.seriesOutlineStrokeList.clone(); - return clone; - } - - /** - * Provides serialization support. - * - * @param stream the output stream. - * - * @throws IOException if there is an I/O error. - */ - private void writeObject(ObjectOutputStream stream) throws IOException { - stream.defaultWriteObject(); - - SerialUtilities.writeShape(this.legendItemShape, stream); - SerialUtilities.writePaint(this.seriesPaint, stream); - SerialUtilities.writePaint(this.baseSeriesPaint, stream); - SerialUtilities.writePaint(this.seriesOutlinePaint, stream); - SerialUtilities.writePaint(this.baseSeriesOutlinePaint, stream); - SerialUtilities.writeStroke(this.seriesOutlineStroke, stream); - SerialUtilities.writeStroke(this.baseSeriesOutlineStroke, stream); - SerialUtilities.writePaint(this.labelPaint, stream); - SerialUtilities.writePaint(this.axisLinePaint, stream); - SerialUtilities.writeStroke(this.axisLineStroke, stream); - } - - /** - * Provides serialization support. - * - * @param stream the input stream. - * - * @throws IOException if there is an I/O error. 
- * @throws ClassNotFoundException if there is a classpath problem. - */ - private void readObject(ObjectInputStream stream) throws IOException, - ClassNotFoundException { - stream.defaultReadObject(); - - this.legendItemShape = SerialUtilities.readShape(stream); - this.seriesPaint = SerialUtilities.readPaint(stream); - this.baseSeriesPaint = SerialUtilities.readPaint(stream); - this.seriesOutlinePaint = SerialUtilities.readPaint(stream); - this.baseSeriesOutlinePaint = SerialUtilities.readPaint(stream); - this.seriesOutlineStroke = SerialUtilities.readStroke(stream); - this.baseSeriesOutlineStroke = SerialUtilities.readStroke(stream); - this.labelPaint = SerialUtilities.readPaint(stream); - this.axisLinePaint = SerialUtilities.readPaint(stream); - this.axisLineStroke = SerialUtilities.readStroke(stream); - if (this.dataset != null) { - this.dataset.addChangeListener(this); - } - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/demo/SpiderWebPlotTests.jav b/src/org/gcube/contentmanagement/graphtools/plotting/demo/SpiderWebPlotTests.jav deleted file mode 100644 index 3408d58..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/demo/SpiderWebPlotTests.jav +++ /dev/null @@ -1,372 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * =========================================================== - * - * (C) Copyright 2000-2008, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. 
- * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. - * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ----------------------- - * SpiderWebPlotTests.java - * ----------------------- - * (C) Copyright 2005-2008, by Object Refinery Limited and Contributors. - * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): -; - * - * Changes - * ------- - * 10-Jun-2005 : Version 1 (DG); - * 01-Jun-2006 : Added testDrawWithNullInfo() method (DG); - * 05-Feb-2007 : Added more checks to testCloning (DG); - * - */ - -package org.gcube.portlets.user.timeseriesportlets.rapidminer.plotting; - -import java.awt.BasicStroke; -import java.awt.Color; -import java.awt.Font; -import java.awt.GradientPaint; -import java.awt.Graphics2D; -import java.awt.Rectangle; -import java.awt.geom.Rectangle2D; -import java.awt.image.BufferedImage; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.ObjectInput; -import java.io.ObjectInputStream; -import java.io.ObjectOutput; -import java.io.ObjectOutputStream; -import java.text.DecimalFormat; - -import junit.framework.Test; -import junit.framework.TestCase; -import junit.framework.TestSuite; - -import org.jfree.chart.JFreeChart; -import org.jfree.chart.labels.StandardCategoryItemLabelGenerator; -import org.jfree.chart.labels.StandardCategoryToolTipGenerator; -import org.jfree.chart.plot.SpiderWebPlot; -import org.jfree.chart.urls.StandardCategoryURLGenerator; -import 
org.jfree.data.category.DefaultCategoryDataset; -import org.jfree.util.Rotation; -import org.jfree.util.TableOrder; - -/** - * Tests for the {@link SpiderWebPlot} class. - */ -public class SpiderWebPlotTests extends TestCase { - - /** - * Returns the tests as a test suite. - * - * @return The test suite. - */ - public static Test suite() { - return new TestSuite(SpiderWebPlotTests.class); - } - - /** - * Constructs a new set of tests. - * - * @param name the name of the tests. - */ - public SpiderWebPlotTests(String name) { - super(name); - } - - /** - * Some checks for the equals() method. - */ - public void testEquals() { - SpiderWebPlot p1 = new SpiderWebPlot(new DefaultCategoryDataset()); - SpiderWebPlot p2 = new SpiderWebPlot(new DefaultCategoryDataset()); - assertTrue(p1.equals(p2)); - assertTrue(p2.equals(p1)); - - // dataExtractOrder - p1.setDataExtractOrder(TableOrder.BY_COLUMN); - assertFalse(p1.equals(p2)); - p2.setDataExtractOrder(TableOrder.BY_COLUMN); - assertTrue(p1.equals(p2)); - - // headPercent - p1.setHeadPercent(0.321); - assertFalse(p1.equals(p2)); - p2.setHeadPercent(0.321); - assertTrue(p1.equals(p2)); - - // interiorGap - p1.setInteriorGap(0.123); - assertFalse(p1.equals(p2)); - p2.setInteriorGap(0.123); - assertTrue(p1.equals(p2)); - - // startAngle - p1.setStartAngle(0.456); - assertFalse(p1.equals(p2)); - p2.setStartAngle(0.456); - assertTrue(p1.equals(p2)); - - // direction - p1.setDirection(Rotation.ANTICLOCKWISE); - assertFalse(p1.equals(p2)); - p2.setDirection(Rotation.ANTICLOCKWISE); - assertTrue(p1.equals(p2)); - - // maxValue - p1.setMaxValue(123.4); - assertFalse(p1.equals(p2)); - p2.setMaxValue(123.4); - assertTrue(p1.equals(p2)); - - // legendItemShape - p1.setLegendItemShape(new Rectangle(1, 2, 3, 4)); - assertFalse(p1.equals(p2)); - p2.setLegendItemShape(new Rectangle(1, 2, 3, 4)); - assertTrue(p1.equals(p2)); - - // seriesPaint - p1.setSeriesPaint(new GradientPaint(1.0f, 2.0f, Color.red, - 3.0f, 4.0f, Color.white)); - 
assertFalse(p1.equals(p2)); - p2.setSeriesPaint(new GradientPaint(1.0f, 2.0f, Color.red, - 3.0f, 4.0f, Color.white)); - assertTrue(p1.equals(p2)); - - // seriesPaintList - p1.setSeriesPaint(1, new GradientPaint(1.0f, 2.0f, Color.yellow, - 3.0f, 4.0f, Color.white)); - assertFalse(p1.equals(p2)); - p2.setSeriesPaint(1, new GradientPaint(1.0f, 2.0f, Color.yellow, - 3.0f, 4.0f, Color.white)); - assertTrue(p1.equals(p2)); - - // baseSeriesPaint - p1.setBaseSeriesPaint(new GradientPaint(1.0f, 2.0f, Color.red, - 3.0f, 4.0f, Color.black)); - assertFalse(p1.equals(p2)); - p2.setBaseSeriesPaint(new GradientPaint(1.0f, 2.0f, Color.red, - 3.0f, 4.0f, Color.black)); - assertTrue(p1.equals(p2)); - - // seriesOutlinePaint - p1.setSeriesOutlinePaint(new GradientPaint(1.0f, 2.0f, Color.blue, - 3.0f, 4.0f, Color.black)); - assertFalse(p1.equals(p2)); - p2.setSeriesOutlinePaint(new GradientPaint(1.0f, 2.0f, Color.blue, - 3.0f, 4.0f, Color.black)); - assertTrue(p1.equals(p2)); - - // seriesOutlinePaintList - p1.setSeriesOutlinePaint(1, new GradientPaint(1.0f, 2.0f, Color.blue, - 3.0f, 4.0f, Color.green)); - assertFalse(p1.equals(p2)); - p2.setSeriesOutlinePaint(1, new GradientPaint(1.0f, 2.0f, Color.blue, - 3.0f, 4.0f, Color.green)); - assertTrue(p1.equals(p2)); - - // baseSeriesOutlinePaint - p1.setBaseSeriesOutlinePaint(new GradientPaint(1.0f, 2.0f, Color.cyan, - 3.0f, 4.0f, Color.green)); - assertFalse(p1.equals(p2)); - p2.setBaseSeriesOutlinePaint(new GradientPaint(1.0f, 2.0f, Color.cyan, - 3.0f, 4.0f, Color.green)); - assertTrue(p1.equals(p2)); - - // seriesOutlineStroke - BasicStroke s = new BasicStroke(1.23f); - p1.setSeriesOutlineStroke(s); - assertFalse(p1.equals(p2)); - p2.setSeriesOutlineStroke(s); - assertTrue(p1.equals(p2)); - - // seriesOutlineStrokeList - p1.setSeriesOutlineStroke(1, s); - assertFalse(p1.equals(p2)); - p2.setSeriesOutlineStroke(1, s); - assertTrue(p1.equals(p2)); - - // baseSeriesOutlineStroke - p1.setBaseSeriesOutlineStroke(s); - 
assertFalse(p1.equals(p2)); - p2.setBaseSeriesOutlineStroke(s); - assertTrue(p1.equals(p2)); - - // webFilled - p1.setWebFilled(false); - assertFalse(p1.equals(p2)); - p2.setWebFilled(false); - assertTrue(p1.equals(p2)); - - // axisLabelGap - p1.setAxisLabelGap(0.11); - assertFalse(p1.equals(p2)); - p2.setAxisLabelGap(0.11); - assertTrue(p1.equals(p2)); - - // labelFont - p1.setLabelFont(new Font("Serif", Font.PLAIN, 9)); - assertFalse(p1.equals(p2)); - p2.setLabelFont(new Font("Serif", Font.PLAIN, 9)); - assertTrue(p1.equals(p2)); - - // labelPaint - p1.setLabelPaint(new GradientPaint(1.0f, 2.0f, Color.red, - 3.0f, 4.0f, Color.blue)); - assertFalse(p1.equals(p2)); - p2.setLabelPaint(new GradientPaint(1.0f, 2.0f, Color.red, - 3.0f, 4.0f, Color.blue)); - assertTrue(p1.equals(p2)); - - // labelGenerator - p1.setLabelGenerator(new StandardCategoryItemLabelGenerator("XYZ: {0}", - new DecimalFormat("0.000"))); - assertFalse(p1.equals(p2)); - p2.setLabelGenerator(new StandardCategoryItemLabelGenerator("XYZ: {0}", - new DecimalFormat("0.000"))); - assertTrue(p1.equals(p2)); - - // toolTipGenerator - p1.setToolTipGenerator(new StandardCategoryToolTipGenerator()); - assertFalse(p1.equals(p2)); - p2.setToolTipGenerator(new StandardCategoryToolTipGenerator()); - assertTrue(p1.equals(p2)); - - // urlGenerator - p1.setURLGenerator(new StandardCategoryURLGenerator()); - assertFalse(p1.equals(p2)); - p2.setURLGenerator(new StandardCategoryURLGenerator()); - assertTrue(p1.equals(p2)); - - // axisLinePaint - p1.setAxisLinePaint(Color.red); - assertFalse(p1.equals(p2)); - p2.setAxisLinePaint(Color.red); - assertTrue(p1.equals(p2)); - - // axisLineStroke - p1.setAxisLineStroke(new BasicStroke(1.1f)); - assertFalse(p1.equals(p2)); - p2.setAxisLineStroke(new BasicStroke(1.1f)); - assertTrue(p1.equals(p2)); - } - - /** - * Confirm that cloning works. 
- */ - public void testCloning() { - SpiderWebPlot p1 = new SpiderWebPlot(new DefaultCategoryDataset()); - Rectangle2D legendShape = new Rectangle2D.Double(1.0, 2.0, 3.0, 4.0); - p1.setLegendItemShape(legendShape); - SpiderWebPlot p2 = null; - try { - p2 = (SpiderWebPlot) p1.clone(); - } - catch (CloneNotSupportedException e) { - e.printStackTrace(); - } - assertTrue(p1 != p2); - assertTrue(p1.getClass() == p2.getClass()); - assertTrue(p1.equals(p2)); - - // change the legendItemShape - legendShape.setRect(4.0, 3.0, 2.0, 1.0); - assertFalse(p1.equals(p2)); - p2.setLegendItemShape(legendShape); - assertTrue(p1.equals(p2)); - - // change a series paint - p1.setSeriesPaint(1, Color.black); - assertFalse(p1.equals(p2)); - p2.setSeriesPaint(1, Color.black); - assertTrue(p1.equals(p2)); - - // change a series outline paint - p1.setSeriesOutlinePaint(0, Color.red); - assertFalse(p1.equals(p2)); - p2.setSeriesOutlinePaint(0, Color.red); - assertTrue(p1.equals(p2)); - - // change a series outline stroke - p1.setSeriesOutlineStroke(0, new BasicStroke(1.1f)); - assertFalse(p1.equals(p2)); - p2.setSeriesOutlineStroke(0, new BasicStroke(1.1f)); - assertTrue(p1.equals(p2)); - - } - - /** - * Serialize an instance, restore it, and check for equality. - */ - public void testSerialization() { - - SpiderWebPlot p1 = new SpiderWebPlot(new DefaultCategoryDataset()); - SpiderWebPlot p2 = null; - - try { - ByteArrayOutputStream buffer = new ByteArrayOutputStream(); - ObjectOutput out = new ObjectOutputStream(buffer); - out.writeObject(p1); - out.close(); - - ObjectInput in = new ObjectInputStream( - new ByteArrayInputStream(buffer.toByteArray())); - p2 = (SpiderWebPlot) in.readObject(); - in.close(); - } - catch (Exception e) { - e.printStackTrace(); - } - assertEquals(p1, p2); - - } - - /** - * Draws the chart with a null info object to make sure that no exceptions - * are thrown. 
- */ - public void testDrawWithNullInfo() { - DefaultCategoryDataset dataset = new DefaultCategoryDataset(); - dataset.addValue(35.0, "S1", "C1"); - dataset.addValue(45.0, "S1", "C2"); - dataset.addValue(55.0, "S1", "C3"); - dataset.addValue(15.0, "S1", "C4"); - dataset.addValue(25.0, "S1", "C5"); - SpiderWebPlot plot = new SpiderWebPlot(dataset); - JFreeChart chart = new JFreeChart(plot); - boolean success = false; - try { - BufferedImage image = new BufferedImage(200 , 100, - BufferedImage.TYPE_INT_RGB); - Graphics2D g2 = image.createGraphics(); - chart.draw(g2, new Rectangle2D.Double(0, 0, 200, 100), null, null); -// g2.dispose(); - success = true; - } - catch (Exception e) { - success = false; - } - assertTrue(success); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/demo/TestWeb.java b/src/org/gcube/contentmanagement/graphtools/plotting/demo/TestWeb.java deleted file mode 100644 index ce6b085..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/demo/TestWeb.java +++ /dev/null @@ -1,43 +0,0 @@ -package org.gcube.contentmanagement.graphtools.plotting.demo; - -import java.awt.Graphics2D; -import java.awt.geom.Rectangle2D; -import java.awt.image.BufferedImage; - -import org.jfree.chart.JFreeChart; -import org.jfree.chart.plot.SpiderWebPlot; -import org.jfree.data.category.DefaultCategoryDataset; - -public class TestWeb { - - /** - * @param args - */ - public static void main(String[] args) { - testDrawWithNullInfo(); - } - public static void testDrawWithNullInfo() { - DefaultCategoryDataset dataset = new DefaultCategoryDataset(); - dataset.addValue(35.0, "S1", "C1"); - dataset.addValue(45.0, "S1", "C2"); - dataset.addValue(55.0, "S1", "C3"); - dataset.addValue(15.0, "S1", "C4"); - dataset.addValue(25.0, "S1", "C5"); - SpiderWebPlot plot = new SpiderWebPlot(dataset); - JFreeChart chart = new JFreeChart(plot); - boolean success = false; - try { - BufferedImage image = new BufferedImage(200 , 100, - BufferedImage.TYPE_INT_RGB); 
- Graphics2D g2 = image.createGraphics(); - chart.draw(g2, new Rectangle2D.Double(0, 0, 200, 100), null, null); -// g2.dispose(); - - success = true; - } - catch (Exception e) { - success = false; - } - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/demo/TimeSeriesChartDemo1.java b/src/org/gcube/contentmanagement/graphtools/plotting/demo/TimeSeriesChartDemo1.java deleted file mode 100644 index 9cf28c3..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/demo/TimeSeriesChartDemo1.java +++ /dev/null @@ -1,219 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * =========================================================== - * - * (C) Copyright 2000-2009, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. - * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------------- - * TimeSeriesChartDemo1.java - * ------------------------- - * (C) Copyright 2003-2009, by Object Refinery Limited and Contributors. 
- * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): ; - * - * Changes - * ------- - * 09-Mar-2005 : Version 1, copied from the demo collection that ships with - * the JFreeChart Developer Guide (DG); - * - */ - -package org.gcube.contentmanagement.graphtools.plotting.demo; - -import java.awt.Color; -import java.text.SimpleDateFormat; - -import javax.swing.JPanel; - -import org.jfree.chart.ChartFactory; -import org.jfree.chart.ChartPanel; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.axis.DateAxis; -import org.jfree.chart.plot.XYPlot; -import org.jfree.chart.renderer.xy.XYItemRenderer; -import org.jfree.chart.renderer.xy.XYLineAndShapeRenderer; -import org.jfree.data.time.Month; -import org.jfree.data.time.TimeSeries; -import org.jfree.data.time.TimeSeriesCollection; -import org.jfree.data.xy.XYDataset; -import org.jfree.ui.ApplicationFrame; -import org.jfree.ui.RectangleInsets; -import org.jfree.ui.RefineryUtilities; - -/** - * An example of a time series chart. For the most part, default settings are - * used, except that the renderer is modified to show filled shapes (as well as - * lines) at each data point. - */ -public class TimeSeriesChartDemo1 extends ApplicationFrame { - - /** - * A demonstration application showing how to create a simple time series - * chart. This example uses monthly data. - * - * @param title the frame title. - */ - public TimeSeriesChartDemo1(String title) { - super(title); - ChartPanel chartPanel = (ChartPanel) createDemoPanel(); - chartPanel.setPreferredSize(new java.awt.Dimension(500, 270)); - setContentPane(chartPanel); - } - - /** - * Creates a chart. - * - * @param dataset a dataset. - * - * @return A chart. 
- */ - private static JFreeChart createChart(XYDataset dataset) { - - JFreeChart chart = ChartFactory.createTimeSeriesChart( - "Legal & General Unit Trust Prices", // title - "Date", // x-axis label - "Price Per Unit", // y-axis label - dataset, // data - true, // create legend? - true, // generate tooltips? - false // generate URLs? - ); - - chart.setBackgroundPaint(Color.white); - - XYPlot plot = (XYPlot) chart.getPlot(); - plot.setBackgroundPaint(Color.lightGray); - plot.setDomainGridlinePaint(Color.white); - plot.setRangeGridlinePaint(Color.white); - plot.setAxisOffset(new RectangleInsets(5.0, 5.0, 5.0, 5.0)); - plot.setDomainCrosshairVisible(true); - plot.setRangeCrosshairVisible(true); - - XYItemRenderer r = plot.getRenderer(); - if (r instanceof XYLineAndShapeRenderer) { - XYLineAndShapeRenderer renderer = (XYLineAndShapeRenderer) r; - renderer.setBaseShapesVisible(true); - renderer.setBaseShapesFilled(true); - renderer.setDrawSeriesLineAsPath(true); - } - - DateAxis axis = (DateAxis) plot.getDomainAxis(); - axis.setDateFormatOverride(new SimpleDateFormat("MMM-yyyy")); - - return chart; - - } - - /** - * Creates a dataset, consisting of two series of monthly data. - * - * @return The dataset. 
- */ - private static XYDataset createDataset() { - - TimeSeries s1 = new TimeSeries("L&G European Index Trust"); - s1.add(new Month(2, 2001), 181.8); - s1.add(new Month(3, 2001), 167.3); - s1.add(new Month(4, 2001), 153.8); - s1.add(new Month(5, 2001), 167.6); - s1.add(new Month(6, 2001), 158.8); - s1.add(new Month(7, 2001), 148.3); - s1.add(new Month(8, 2001), 153.9); - s1.add(new Month(9, 2001), 142.7); - s1.add(new Month(10, 2001), 123.2); - s1.add(new Month(11, 2001), 131.8); - s1.add(new Month(12, 2001), 139.6); - s1.add(new Month(1, 2002), 142.9); - s1.add(new Month(2, 2002), 138.7); - s1.add(new Month(3, 2002), 137.3); - s1.add(new Month(4, 2002), 143.9); - s1.add(new Month(5, 2002), 139.8); - s1.add(new Month(6, 2002), 137.0); - s1.add(new Month(7, 2002), 132.8); - - TimeSeries s2 = new TimeSeries("L&G UK Index Trust"); - s2.add(new Month(2, 2001), 129.6); - s2.add(new Month(3, 2001), 123.2); - s2.add(new Month(4, 2001), 117.2); - s2.add(new Month(5, 2001), 124.1); - s2.add(new Month(6, 2001), 122.6); - s2.add(new Month(7, 2001), 119.2); - s2.add(new Month(8, 2001), 116.5); - s2.add(new Month(9, 2001), 112.7); - s2.add(new Month(10, 2001), 101.5); - s2.add(new Month(11, 2001), 106.1); - s2.add(new Month(12, 2001), 110.3); - s2.add(new Month(1, 2002), 111.7); - s2.add(new Month(2, 2002), 111.0); - s2.add(new Month(3, 2002), 109.6); - s2.add(new Month(4, 2002), 113.2); - s2.add(new Month(5, 2002), 111.6); - s2.add(new Month(6, 2002), 108.8); - s2.add(new Month(7, 2002), 101.6); - - // ****************************************************************** - // More than 150 demo applications are included with the JFreeChart - // Developer Guide...for more information, see: - // - // > http://www.object-refinery.com/jfreechart/guide.html - // - // ****************************************************************** - - TimeSeriesCollection dataset = new TimeSeriesCollection(); - dataset.addSeries(s1); - dataset.addSeries(s2); - - return dataset; - - } - - /** - * 
Creates a panel for the demo (used by SuperDemo.java). - * - * @return A panel. - */ - public static JPanel createDemoPanel() { - JFreeChart chart = createChart(createDataset()); - ChartPanel panel = new ChartPanel(chart); - panel.setFillZoomRectangle(true); - panel.setMouseWheelEnabled(true); - return panel; - } - - /** - * Starting point for the demonstration application. - * - * @param args ignored. - */ - public static void main(String[] args) { - - TimeSeriesChartDemo1 demo = new TimeSeriesChartDemo1( - "Time Series Chart Demo 1"); - demo.pack(); - RefineryUtilities.centerFrameOnScreen(demo); - demo.setVisible(true); - - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/GaussianDistributionGraph.java b/src/org/gcube/contentmanagement/graphtools/plotting/graphs/GaussianDistributionGraph.java deleted file mode 100644 index bce4ed2..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/GaussianDistributionGraph.java +++ /dev/null @@ -1,208 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * =========================================================== - * - * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. - * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------ - * PieGraph.java - * ------------------ - * (C) Copyright 2003-2007, by Object Refinery Limited and Contributors. - * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): ; - * - * Changes - * ------- - * 09-Mar-2005 : Version 1, copied from the demo collection that ships with - * the JFreeChart Developer Guide (DG); - * - */ - -package org.gcube.contentmanagement.graphtools.plotting.graphs; - -import java.awt.BasicStroke; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph; -import org.gcube.contentmanagement.graphtools.utils.MathFunctions; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphData; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; -import org.gcube.portlets.user.timeseries.charts.support.types.Point; -import org.jfree.chart.ChartFactory; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.axis.ValueAxis; -import org.jfree.chart.plot.PlotOrientation; -import org.jfree.chart.plot.XYPlot; -import org.jfree.chart.renderer.xy.XYLineAndShapeRenderer; -import org.jfree.data.function.NormalDistributionFunction2D; -import org.jfree.data.general.Dataset; -import org.jfree.data.general.DatasetUtilities; -import org.jfree.data.xy.XYSeriesCollection; - -import com.thoughtworks.xstream.XStream; -import com.thoughtworks.xstream.io.xml.DomDriver; - -public class GaussianDistributionGraph extends 
GenericStandaloneGraph { - - private static final long serialVersionUID = 1L; - - public GaussianDistributionGraph(String title) { - super(title); - } - - protected Dataset generateDataset() { - XYSeriesCollection xyseriescollection = new XYSeriesCollection(); - NormalDistributionFunction2D normaldistributionfunction2d = new NormalDistributionFunction2D(0.0D, 1.0D); - org.jfree.data.xy.XYSeries xyseries = DatasetUtilities.sampleFunction2DToSeries(normaldistributionfunction2d, -5.0999999999999996D, 5.0999999999999996D, 121, "N1"); - xyseriescollection.addSeries(xyseries); - NormalDistributionFunction2D normaldistributionfunction2d1 = new NormalDistributionFunction2D(0.0D, Math.sqrt(0.20000000000000001D)); - org.jfree.data.xy.XYSeries xyseries1 = DatasetUtilities.sampleFunction2DToSeries(normaldistributionfunction2d1, -5.0999999999999996D, 5.0999999999999996D, 121, "N2"); - xyseriescollection.addSeries(xyseries1); - NormalDistributionFunction2D normaldistributionfunction2d2 = new NormalDistributionFunction2D(0.0D, Math.sqrt(5D)); - org.jfree.data.xy.XYSeries xyseries2 = DatasetUtilities.sampleFunction2DToSeries(normaldistributionfunction2d2, -5.0999999999999996D, 5.0999999999999996D, 121, "N3"); - xyseriescollection.addSeries(xyseries2); - NormalDistributionFunction2D normaldistributionfunction2d3 = new NormalDistributionFunction2D(-2D, Math.sqrt(0.5D)); - org.jfree.data.xy.XYSeries xyseries3 = DatasetUtilities.sampleFunction2DToSeries(normaldistributionfunction2d3, -5.0999999999999996D, 5.0999999999999996D, 121, "N4"); - xyseriescollection.addSeries(xyseries3); - return xyseriescollection; - } - - protected JFreeChart createChart(Dataset dataset) { - - String label = "mean:"+mean+" variance:"+variance; - if (label.length()>30) - label = label.substring(0,30)+"..."; - - JFreeChart jfreechart = ChartFactory.createXYLineChart("", label, "", (XYSeriesCollection)dataset, PlotOrientation.VERTICAL, true, true, false); - XYPlot xyplot = (XYPlot)jfreechart.getPlot(); - 
xyplot.setDomainZeroBaselineVisible(true); - xyplot.setRangeZeroBaselineVisible(true); - xyplot.setDomainPannable(true); - xyplot.setRangePannable(true); - ValueAxis valueaxis = xyplot.getDomainAxis(); - valueaxis.setLowerMargin(0.0D); - valueaxis.setUpperMargin(0.0D); - XYLineAndShapeRenderer xylineandshaperenderer = (XYLineAndShapeRenderer)xyplot.getRenderer(); - xylineandshaperenderer.setDrawSeriesLineAsPath(true); - xylineandshaperenderer.setSeriesStroke(0, new BasicStroke(1.5F)); - - return jfreechart; - } - - @Override - protected GenericStandaloneGraph getInstance(String title) { - return new GaussianDistributionGraph(title); - } - - - double mean; - double variance; - @Override - protected Dataset convert2Dataset(GraphData st) { - - List> pointslist = st.getData(); - - // NOTE: after the graph generation graphs are inverted in x and y - int numbOfSeries = pointslist.size(); - XYSeriesCollection xyseriescollection = new XYSeriesCollection(); - - if (numbOfSeries > 0) { - int numbOfPoints = pointslist.get(0).getEntries().size(); - //for each series - for (int s = 0; s < numbOfSeries; s++) { - //get label - String serieslabel = pointslist.get(s).getLabel(); - double maxRange = st.getMaxY().doubleValue(); - double minRange = st.getMinY().doubleValue(); - - //get doubles vector for performing mean and variance calculation - double [] points = MathFunctions.points2Double(pointslist,s,numbOfPoints); - mean = MathFunctions.mean(points); - variance = com.rapidminer.tools.math.MathFunctions.variance(points, Double.NEGATIVE_INFINITY); - - mean = Math.round(mean); - variance = Math.round(variance); - - if (variance==0) - variance = 0.1; - - AnalysisLogger.getLogger().debug("mean "+mean+" variance "+variance); - //build up normal distribution and add to the series - NormalDistributionFunction2D normaldistributionfunction2d = new NormalDistributionFunction2D(mean, variance); - //make the representation a bit longer - maxRange = maxRange*2; - org.jfree.data.xy.XYSeries 
xyseries = DatasetUtilities.sampleFunction2DToSeries(normaldistributionfunction2d, minRange, maxRange, 121, serieslabel); - - - - xyseriescollection.addSeries(xyseries); - } - } - return xyseriescollection; - } - - public static Map> graphs2Normals(GraphGroups gg){ - - Map> normalsMap = new HashMap>(); - for (String key:gg.getGraphs().keySet()) - { - - GraphData st = gg.getGraphs().get(key); - List> pointslist = st.getData(); - List normalsList = new ArrayList(); - - // NOTE: after the graph generation graphs are inverted in x and y - int numbOfSeries = pointslist.size(); - - if (numbOfSeries > 0) { - int numbOfPoints = pointslist.get(0).getEntries().size(); - //for each series - for (int s = 0; s < numbOfSeries; s++) { - //get doubles vector for performing mean and variance calculation - double [] points = MathFunctions.points2Double(pointslist,s,numbOfPoints); - double mean = MathFunctions.mean(points); - double variance = com.rapidminer.tools.math.MathFunctions.variance(points, Double.NEGATIVE_INFINITY); - - if (variance==0) - variance = 0.1; - - AnalysisLogger.getLogger().debug("mean "+mean+" variance "+variance); - //build up normal distribution and add to the series - NormalDistributionFunction2D normaldistributionfunction2d = new NormalDistributionFunction2D(mean, variance); - normalsList.add(normaldistributionfunction2d); - } - } - - normalsMap.put(key, normalsList); - } - - return normalsMap; - } - - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/HistogramGraph.java b/src/org/gcube/contentmanagement/graphtools/plotting/graphs/HistogramGraph.java deleted file mode 100644 index 547f7d8..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/HistogramGraph.java +++ /dev/null @@ -1,131 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * =========================================================== - * - * (C) Copyright 2000-2007, 
by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. - * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------ - * PieGraph.java - * ------------------ - * (C) Copyright 2003-2007, by Object Refinery Limited and Contributors. 
- * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): ; - * - * Changes - * ------- - * 09-Mar-2005 : Version 1, copied from the demo collection that ships with - * the JFreeChart Developer Guide (DG); - * - */ - -package org.gcube.contentmanagement.graphtools.plotting.graphs; - -import java.awt.Color; - -import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph; -import org.jfree.chart.ChartFactory; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.axis.AxisLocation; -import org.jfree.chart.axis.CategoryAxis; -import org.jfree.chart.axis.CategoryLabelPositions; -import org.jfree.chart.plot.CategoryPlot; -import org.jfree.chart.plot.PlotOrientation; -import org.jfree.chart.title.LegendTitle; -import org.jfree.data.category.DefaultCategoryDataset; -import org.jfree.data.general.Dataset; -import org.jfree.ui.RectangleInsets; - -public class HistogramGraph extends GenericStandaloneGraph { - - private static final long serialVersionUID = 1L; - - public HistogramGraph(String title) { - super(title); - } - - protected Dataset generateDataset() { - String s = "S1"; - String s1 = "S2"; - String s2 = "S3"; - String s3 = "Category 1"; - String s4 = "Category 2"; - String s5 = "Category 3"; - String s6 = "Category 4"; - String s7 = "Category 5"; - String s8 = "Category 6"; - String s9 = "Category 7"; - String s10 = "Category 8"; - DefaultCategoryDataset defaultcategorydataset = new DefaultCategoryDataset(); - defaultcategorydataset.addValue(1.0D, s, s3); - defaultcategorydataset.addValue(4D, s, s4); - defaultcategorydataset.addValue(3D, s, s5); - defaultcategorydataset.addValue(5D, s, s6); - defaultcategorydataset.addValue(5D, s, s7); - defaultcategorydataset.addValue(7D, s, s8); - defaultcategorydataset.addValue(7D, s, s9); - defaultcategorydataset.addValue(8D, s, s10); - defaultcategorydataset.addValue(5D, s1, s3); - defaultcategorydataset.addValue(7D, s1, s4); - defaultcategorydataset.addValue(6D, s1, s5); 
- defaultcategorydataset.addValue(8D, s1, s6); - defaultcategorydataset.addValue(4D, s1, s7); - defaultcategorydataset.addValue(4D, s1, s8); - defaultcategorydataset.addValue(2D, s1, s9); - defaultcategorydataset.addValue(1.0D, s1, s10); - defaultcategorydataset.addValue(4D, s2, s3); - defaultcategorydataset.addValue(3D, s2, s4); - defaultcategorydataset.addValue(2D, s2, s5); - defaultcategorydataset.addValue(3D, s2, s6); - defaultcategorydataset.addValue(6D, s2, s7); - defaultcategorydataset.addValue(3D, s2, s8); - defaultcategorydataset.addValue(4D, s2, s9); - defaultcategorydataset.addValue(3D, s2, s10); - return defaultcategorydataset; - - } - - protected JFreeChart createChart(Dataset dataset) { - - JFreeChart chart = ChartFactory.createBarChart("Histogram Chart", "", "", (DefaultCategoryDataset) dataset, PlotOrientation.VERTICAL, true, true, false); - chart.setBackgroundPaint(Color.white); - CategoryPlot categoryplot = (CategoryPlot) chart.getPlot(); - categoryplot.setBackgroundPaint(new Color(238, 238, 255)); - categoryplot.setDomainAxisLocation(AxisLocation.BOTTOM_OR_RIGHT); - - CategoryAxis categoryaxis = categoryplot.getDomainAxis(); - categoryaxis.setCategoryLabelPositions(CategoryLabelPositions.DOWN_45); - - LegendTitle legendtitle = new LegendTitle(categoryplot.getRenderer(0)); - legendtitle.setMargin(new RectangleInsets(2D, 2D, 2D, 2D)); - - - return chart; - } - - @Override - protected GenericStandaloneGraph getInstance(String title) { - return new HistogramGraph(title); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/LineGraph.java b/src/org/gcube/contentmanagement/graphtools/plotting/graphs/LineGraph.java deleted file mode 100644 index b234dc0..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/LineGraph.java +++ /dev/null @@ -1,153 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * 
=========================================================== - * - * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. - * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------ - * PieGraph.java - * ------------------ - * (C) Copyright 2003-2007, by Object Refinery Limited and Contributors. 
- * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): ; - * - * Changes - * ------- - * 09-Mar-2005 : Version 1, copied from the demo collection that ships with - * the JFreeChart Developer Guide (DG); - * - */ - -package org.gcube.contentmanagement.graphtools.plotting.graphs; - -import java.awt.Color; - -import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph; -import org.jfree.chart.ChartFactory; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.plot.CategoryPlot; -import org.jfree.chart.plot.PlotOrientation; -import org.jfree.chart.renderer.category.LineAndShapeRenderer; -import org.jfree.data.category.DefaultCategoryDataset; -import org.jfree.data.general.Dataset; - -public class LineGraph extends GenericStandaloneGraph { - - private static final long serialVersionUID = 1L; - - public LineGraph(String title) { - super(title); - } - - protected Dataset generateDataset() { - // row keys... - String series1 = "First"; - String series2 = "Second"; - String series3 = "Third"; - - // column keys... - String type1 = "Type 1"; - String type2 = "Type 2"; - String type3 = "Type 3"; - String type4 = "Type 4"; - String type5 = "Type 5"; - String type6 = "Type 6"; - String type7 = "Type 7"; - String type8 = "Type 8"; - - // create the dataset... 
- DefaultCategoryDataset dataset = new DefaultCategoryDataset(); - - dataset.addValue(1.0, series1, type1); - dataset.addValue(4.0, series1, type2); - dataset.addValue(3.0, series1, type3); - dataset.addValue(5.0, series1, type4); - dataset.addValue(5.0, series1, type5); - dataset.addValue(7.0, series1, type6); - dataset.addValue(7.0, series1, type7); - dataset.addValue(8.0, series1, type8); - - dataset.addValue(5.0, series2, type1); - dataset.addValue(7.0, series2, type2); - dataset.addValue(6.0, series2, type3); - dataset.addValue(8.0, series2, type4); - dataset.addValue(4.0, series2, type5); - dataset.addValue(4.0, series2, type6); - dataset.addValue(2.0, series2, type7); - dataset.addValue(1.0, series2, type8); - - dataset.addValue(4.0, series3, type1); - dataset.addValue(3.0, series3, type2); - dataset.addValue(2.0, series3, type3); - dataset.addValue(3.0, series3, type4); - dataset.addValue(6.0, series3, type5); - dataset.addValue(3.0, series3, type6); - dataset.addValue(4.0, series3, type7); - dataset.addValue(3.0, series3, type8); - return dataset; - } - - protected JFreeChart createChart(Dataset dataset) { - - // create the chart... 
- JFreeChart chart = ChartFactory.createLineChart( - this.getTitle(), // chart title - "", // domain axis label - "", // range axis label - (DefaultCategoryDataset)dataset, // data - PlotOrientation.VERTICAL, // orientation - true, // include legend - true, // tooltips - false // urls - ); - chart.setBackgroundPaint(Color.white); - CategoryPlot plot = chart.getCategoryPlot(); -// plot.setBackgroundPaint(Color.white); - plot.setRangeGridlinePaint(Color.white); - plot.setDomainCrosshairVisible(true); - plot.setDomainGridlinesVisible(true); - plot.setRangeCrosshairVisible(true); - plot.setRenderer(new LineAndShapeRenderer(true,true)); - - //deprecated - /* - LineAndShapeRenderer renderer = (LineAndShapeRenderer) plot.getRenderer(); - renderer.setShapesVisible(true); - renderer.setDrawOutlines(true); - renderer.setUseFillPaint(true); - renderer.setFillPaint(Color.white); -*/ - - - - - return chart; - } - - @Override - protected GenericStandaloneGraph getInstance(String title) { - return new LineGraph(title); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/NumericSeriesGraph.java b/src/org/gcube/contentmanagement/graphtools/plotting/graphs/NumericSeriesGraph.java deleted file mode 100644 index 6177e38..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/NumericSeriesGraph.java +++ /dev/null @@ -1,149 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * =========================================================== - * - * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. 
- * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. - * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------ - * PieGraph.java - * ------------------ - * (C) Copyright 2003-2007, by Object Refinery Limited and Contributors. - * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): ; - * - * Changes - * ------- - * 09-Mar-2005 : Version 1, copied from the demo collection that ships with - * the JFreeChart Developer Guide (DG); - * - */ - -package org.gcube.contentmanagement.graphtools.plotting.graphs; - -import java.awt.Color; -import java.util.List; - -import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphData; -import org.gcube.portlets.user.timeseries.charts.support.types.Point; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.axis.NumberAxis; -import org.jfree.chart.plot.XYPlot; -import org.jfree.chart.renderer.xy.XYSplineRenderer; -import org.jfree.data.general.Dataset; -import org.jfree.data.xy.XYDataset; -import org.jfree.data.xy.XYSeries; -import org.jfree.data.xy.XYSeriesCollection; -import org.jfree.ui.RectangleInsets; - -public class NumericSeriesGraph extends GenericStandaloneGraph { - - private static final long serialVersionUID = 1L; - - public NumericSeriesGraph(String title) { - super(title); - } - - protected Dataset generateDataset() { - XYSeries xyseries = new XYSeries("Series 1"); - 
xyseries.add(2D, 56.270000000000003D); - xyseries.add(3D, 41.32D); - xyseries.add(4D, 31.449999999999999D); - xyseries.add(5D, 30.050000000000001D); - xyseries.add(6D, 24.690000000000001D); - xyseries.add(7D, 19.780000000000001D); - xyseries.add(8D, 20.940000000000001D); - xyseries.add(9D, 16.73D); - xyseries.add(10D, 14.210000000000001D); - xyseries.add(11D, 12.44D); - XYSeriesCollection xyseriescollection = new XYSeriesCollection(xyseries); - XYSeries xyseries1 = new XYSeries("Series 2"); - xyseries1.add(11D, 56.270000000000003D); - xyseries1.add(10D, 41.32D); - xyseries1.add(9D, 31.449999999999999D); - xyseries1.add(8D, 30.050000000000001D); - xyseries1.add(7D, 24.690000000000001D); - xyseries1.add(6D, 19.780000000000001D); - xyseries1.add(5D, 20.940000000000001D); - xyseries1.add(4D, 16.73D); - xyseries1.add(3D, 14.210000000000001D); - xyseries1.add(2D, 12.44D); - xyseriescollection.addSeries(xyseries1); - - return xyseriescollection; - } - - protected JFreeChart createChart(Dataset dataset) { - - NumberAxis numberaxis = new NumberAxis("X"); - numberaxis.setAutoRangeIncludesZero(true); - NumberAxis numberaxis1 = new NumberAxis("Y"); - numberaxis1.setAutoRangeIncludesZero(true); - XYSplineRenderer xysplinerenderer = new XYSplineRenderer(); - XYPlot xyplot = new XYPlot((XYDataset) dataset, numberaxis, numberaxis1, xysplinerenderer); - xyplot.setBackgroundPaint(Color.lightGray); - xyplot.setDomainGridlinePaint(Color.white); - xyplot.setRangeGridlinePaint(Color.white); - xyplot.setAxisOffset(new RectangleInsets(4D, 4D, 4D, 4D)); - JFreeChart chart = new JFreeChart("Numeric Series", JFreeChart.DEFAULT_TITLE_FONT, xyplot, true); - - return chart; - } - - @Override - protected Dataset convert2Dataset(GraphData st) { - - List> pointslist = st.getData(); - - // NOTE: after the graph generation graphs are inverted in x and y - int numbOfRows = pointslist.size(); - XYSeriesCollection xyseriescollection = new XYSeriesCollection(); - - if (numbOfRows > 0) { - int numbOfCols 
= pointslist.get(0).getEntries().size(); - // calclulation will be made only for the first series - - for (int x = 0; x < numbOfRows; x++) { - - String serieslabel = pointslist.get(x).getLabel(); - XYSeries xyseries = new XYSeries(serieslabel); - - for (int y = 0; y < numbOfCols; y++) { -// String xlabel = pointslist.get(x).getEntries().get(y).getLabel(); - double value = pointslist.get(x).getEntries().get(y).getValue().doubleValue(); - xyseries.add(y + 1, value); - } - - xyseriescollection.addSeries(xyseries); - } - } - return xyseriescollection; - } - - @Override - protected GenericStandaloneGraph getInstance(String title) { - return new NumericSeriesGraph(title); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/PieGraph.java b/src/org/gcube/contentmanagement/graphtools/plotting/graphs/PieGraph.java deleted file mode 100644 index 167f765..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/PieGraph.java +++ /dev/null @@ -1,149 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * =========================================================== - * - * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. - * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------ - * PieGraph.java - * ------------------ - * (C) Copyright 2003-2007, by Object Refinery Limited and Contributors. - * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): ; - * - * Changes - * ------- - * 09-Mar-2005 : Version 1, copied from the demo collection that ships with - * the JFreeChart Developer Guide (DG); - * - */ - -package org.gcube.contentmanagement.graphtools.plotting.graphs; - -import java.util.List; - -import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphData; -import org.gcube.portlets.user.timeseries.charts.support.types.Point; -import org.jfree.chart.ChartFactory; -import org.jfree.chart.JFreeChart; -import org.jfree.data.category.DefaultCategoryDataset; -import org.jfree.data.general.Dataset; -import org.jfree.data.general.DefaultPieDataset; -import org.jfree.util.TableOrder; - -public class PieGraph extends GenericStandaloneGraph { - - private static final long serialVersionUID = 1L; - - public PieGraph(String title) { - super(title); - } - - /** - * @param args - */ - public static void main(String[] args) { - // TODO Auto-generated method stub - - } - - protected Dataset generateDataset() { - DefaultPieDataset dataset = new DefaultPieDataset(); - dataset.setValue("One", Double.valueOf(43.2)); - dataset.setValue("Two", Double.valueOf(10.0)); - dataset.setValue("Three", Double.valueOf(27.5)); - dataset.setValue("Four", Double.valueOf(17.5)); - 
dataset.setValue("Five", Double.valueOf(11.0)); - dataset.setValue("Six", Double.valueOf(19.4)); - - return dataset; - } -/* - protected JFreeChart createChart(Dataset dataset) { - - PiePlot plot = new PiePlot((DefaultPieDataset) dataset); - JFreeChart chart = new JFreeChart(plot); - - return chart; - } -*/ - protected JFreeChart createChart(Dataset dataset) { - JFreeChart chart = ChartFactory.createMultiplePieChart( - "Multiple Pie Chart", // chart title - (DefaultCategoryDataset)dataset, // dataset - TableOrder.BY_ROW, - true, // include legend - true, - false - ); - return chart; - } - - - protected Dataset convert2DatasetOld(GraphData st) { - - DefaultPieDataset dataset = new DefaultPieDataset(); - List> pointslist = st.getData(); - - // NOTE: after the graph generation graphs are inverted in x and y - int numbOfRows = pointslist.size(); - if (numbOfRows > 0) { - int numbOfCols = pointslist.get(0).getEntries().size(); - // calclulation will be made only for the first series - int x = 0; - - String xlabel = pointslist.get(x).getLabel(); - - //calculate maximum - double max = 0; - for (int y = 0; y < numbOfCols; y++) { - double value = pointslist.get(x).getEntries().get(y).getValue().doubleValue(); - if (value>max){ - max = value; - } - } - - - for (int y = 0; y < numbOfCols; y++) { - double value = pointslist.get(x).getEntries().get(y).getValue().doubleValue(); - value = (value/max)*100; - String ylabel = pointslist.get(x).getEntries().get(y).getLabel(); - AnalysisLogger.getLogger().info(xlabel + ":" + ylabel +"->" + value); - dataset.setValue(xlabel + ":" + ylabel, value); - } - - } - return dataset; - } - - @Override - protected GenericStandaloneGraph getInstance(String title) { - return new PieGraph(title); - } - - - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/RadarGraph.java b/src/org/gcube/contentmanagement/graphtools/plotting/graphs/RadarGraph.java deleted file mode 100644 index a070ea0..0000000 --- 
a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/RadarGraph.java +++ /dev/null @@ -1,65 +0,0 @@ -package org.gcube.contentmanagement.graphtools.plotting.graphs; - - -import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph; -import org.jfree.chart.ChartFactory; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.plot.PlotOrientation; -import org.jfree.chart.plot.SpiderWebPlot; -import org.jfree.data.category.DefaultCategoryDataset; -import org.jfree.data.general.Dataset; - -public class RadarGraph extends GenericStandaloneGraph{ - - /** - * - */ - private static final long serialVersionUID = 1L; - - public RadarGraph(String title) { - super(title); - } - - /** - * @param args - */ - public static void main(String[] args) { - // TODO Auto-generated method stub - - } - - @Override - protected Dataset generateDataset() { - DefaultCategoryDataset dataset = new DefaultCategoryDataset(); - dataset.addValue(35.0, "S1", "C1"); - dataset.addValue(45.0, "S1", "C2"); - dataset.addValue(55.0, "S1", "C3"); - dataset.addValue(15.0, "S1", "C4"); - dataset.addValue(25.0, "S1", "C5"); - dataset.addValue(39.0, "S2", "C1"); - dataset.addValue(20.0, "S2", "C2"); - dataset.addValue(34.0, "S2", "C3"); - dataset.addValue(30.0, "S2", "C4"); - dataset.addValue(13.0, "S2", "C5"); - return dataset; - } - - - @Override - protected JFreeChart createChart(Dataset dataset) { - - SpiderWebPlot plot = new SpiderWebPlot((DefaultCategoryDataset)dataset); - JFreeChart chart = new JFreeChart(plot); - - return chart; - } - - - @Override - protected GenericStandaloneGraph getInstance(String title) { - return new RadarGraph(title); - } - - - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/ScatterGraphGeneric.java b/src/org/gcube/contentmanagement/graphtools/plotting/graphs/ScatterGraphGeneric.java deleted file mode 100644 index 4049fe2..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/ScatterGraphGeneric.java +++ 
/dev/null @@ -1,131 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * =========================================================== - * - * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. - * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------ - * PieGraph.java - * ------------------ - * (C) Copyright 2003-2007, by Object Refinery Limited and Contributors. 
- * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): ; - * - * Changes - * ------- - * 09-Mar-2005 : Version 1, copied from the demo collection that ships with - * the JFreeChart Developer Guide (DG); - * - */ - -package org.gcube.contentmanagement.graphtools.plotting.graphs; - -import java.awt.BasicStroke; -import java.awt.Color; -import java.util.List; - -import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphData; -import org.gcube.portlets.user.timeseries.charts.support.types.Point; -import org.jfree.chart.ChartFactory; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.axis.NumberAxis; -import org.jfree.chart.plot.CategoryPlot; -import org.jfree.chart.plot.PlotOrientation; -import org.jfree.chart.plot.XYPlot; -import org.jfree.chart.renderer.category.LineAndShapeRenderer; -import org.jfree.chart.renderer.xy.XYLineAndShapeRenderer; -import org.jfree.data.category.DefaultCategoryDataset; -import org.jfree.data.general.Dataset; -import org.jfree.data.xy.XYDataset; -import org.jfree.data.xy.XYSeries; -import org.jfree.data.xy.XYSeriesCollection; - -public class ScatterGraphGeneric extends GenericStandaloneGraph { - - private static final long serialVersionUID = 1L; - - public ScatterGraphGeneric(String title) { - super(title); - } - - protected Dataset generateDataset() { - return null; - } - - protected JFreeChart createChart(Dataset dataset) { - - // create the chart... 
- JFreeChart chart = ChartFactory.createLineChart( - "", // chart title - "", // domain axis label - "", // range axis label - (DefaultCategoryDataset)dataset, // data - PlotOrientation.VERTICAL, // orientation - true, // include legend - true, // tooltips - false // urls - ); - chart.setBackgroundPaint(Color.white); - CategoryPlot plot = chart.getCategoryPlot(); - /* - plot.setDomainGridlineStroke(new BasicStroke(0.0F)); - plot.setDomainGridlinePaint(Color.blue); - plot.setRangeGridlineStroke(new BasicStroke(0.0F)); - plot.setRangeMinorGridlineStroke(new BasicStroke(0.0F)); - plot.setRangeGridlinePaint(Color.blue); - plot.setRangeMinorGridlinesVisible(true); - plot.setNoDataMessage("NO DATA"); - plot.setRangePannable(true); - plot.setRangeZeroBaselineVisible(true); - */ - plot.setBackgroundPaint(Color.white); - plot.setRangeGridlinePaint(Color.white); - plot.setRangeGridlinePaint(Color.white); - plot.setDomainCrosshairVisible(true); - plot.setDomainGridlinesVisible(false); - plot.setRangeCrosshairVisible(true); - plot.setRenderer(new LineAndShapeRenderer(false,true)); - - //deprecated - /* - LineAndShapeRenderer renderer = (LineAndShapeRenderer) plot.getRenderer(); - renderer.setShapesVisible(true); - renderer.setDrawOutlines(true); - renderer.setUseFillPaint(true); - renderer.setFillPaint(Color.white); -*/ - - - - - return chart; - } - - @Override - protected GenericStandaloneGraph getInstance(String title) { - return new ScatterGraphGeneric(title); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/ScatterGraphNumeric.java b/src/org/gcube/contentmanagement/graphtools/plotting/graphs/ScatterGraphNumeric.java deleted file mode 100644 index 49dd47d..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/ScatterGraphNumeric.java +++ /dev/null @@ -1,144 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * 
=========================================================== - * - * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. - * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------ - * PieGraph.java - * ------------------ - * (C) Copyright 2003-2007, by Object Refinery Limited and Contributors. 
- * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): ; - * - * Changes - * ------- - * 09-Mar-2005 : Version 1, copied from the demo collection that ships with - * the JFreeChart Developer Guide (DG); - * - */ - -package org.gcube.contentmanagement.graphtools.plotting.graphs; - -import java.awt.BasicStroke; -import java.awt.Color; -import java.util.List; - -import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphData; -import org.gcube.portlets.user.timeseries.charts.support.types.Point; -import org.jfree.chart.ChartFactory; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.axis.NumberAxis; -import org.jfree.chart.plot.PlotOrientation; -import org.jfree.chart.plot.XYPlot; -import org.jfree.chart.renderer.xy.XYLineAndShapeRenderer; -import org.jfree.data.category.DefaultCategoryDataset; -import org.jfree.data.general.Dataset; -import org.jfree.data.xy.XYDataset; -import org.jfree.data.xy.XYSeries; -import org.jfree.data.xy.XYSeriesCollection; - -public class ScatterGraphNumeric extends GenericStandaloneGraph { - - private static final long serialVersionUID = 1L; - - public ScatterGraphNumeric(String title) { - super(title); - } - - protected Dataset generateDataset() { - return null; - } - - protected JFreeChart createChart(Dataset dataset) { - - JFreeChart jfreechart = ChartFactory.createScatterPlot("", "", "", (XYDataset)dataset, PlotOrientation.VERTICAL, true, true, false); - XYPlot xyplot = (XYPlot)jfreechart.getPlot(); - xyplot.setNoDataMessage("NO DATA"); - xyplot.setDomainPannable(true); - xyplot.setRangePannable(true); - xyplot.setDomainZeroBaselineVisible(true); - xyplot.setRangeZeroBaselineVisible(true); - xyplot.setDomainGridlineStroke(new BasicStroke(0.0F)); - xyplot.setDomainMinorGridlineStroke(new BasicStroke(0.0F)); - xyplot.setDomainGridlinePaint(Color.blue); - xyplot.setRangeGridlineStroke(new 
BasicStroke(0.0F)); - xyplot.setRangeMinorGridlineStroke(new BasicStroke(0.0F)); - xyplot.setRangeGridlinePaint(Color.blue); - xyplot.setDomainMinorGridlinesVisible(true); - xyplot.setRangeMinorGridlinesVisible(true); - XYLineAndShapeRenderer xylineandshaperenderer = (XYLineAndShapeRenderer)xyplot.getRenderer(); - xylineandshaperenderer.setSeriesOutlinePaint(0, Color.black); - xylineandshaperenderer.setUseOutlinePaint(true); - NumberAxis numberaxis = (NumberAxis)xyplot.getDomainAxis(); - numberaxis.setAutoRangeIncludesZero(false); - numberaxis.setTickMarkInsideLength(2.0F); - numberaxis.setTickMarkOutsideLength(2.0F); - numberaxis.setMinorTickCount(2); - numberaxis.setMinorTickMarksVisible(true); - NumberAxis numberaxis1 = (NumberAxis)xyplot.getRangeAxis(); - numberaxis1.setTickMarkInsideLength(2.0F); - numberaxis1.setTickMarkOutsideLength(2.0F); - numberaxis1.setMinorTickCount(2); - numberaxis1.setMinorTickMarksVisible(true); - return jfreechart; - } - - @Override - protected Dataset convert2Dataset(GraphData st) { - - List> pointslist = st.getData(); - - // NOTE: after the graph generation graphs are inverted in x and y - int numbOfRows = pointslist.size(); - XYSeriesCollection xyseriescollection = new XYSeriesCollection(); - - if (numbOfRows > 0) { - int numbOfCols = pointslist.get(0).getEntries().size(); - // calculation will be made only for the first series - - for (int x = 0; x < numbOfRows; x++) { - - String serieslabel = pointslist.get(x).getLabel(); - XYSeries xyseries = new XYSeries(serieslabel); - - for (int y = 0; y < numbOfCols; y++) { -// String xlabel = pointslist.get(x).getEntries().get(y).getLabel(); - double value = pointslist.get(x).getEntries().get(y).getValue().doubleValue(); - xyseries.add(y + 1, value); - } - - xyseriescollection.addSeries(xyseries); - } - } - return xyseriescollection; - } - - @Override - protected GenericStandaloneGraph getInstance(String title) { - return new ScatterGraphNumeric(title); - } - -} diff --git 
a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/TimeSeriesGraph.java b/src/org/gcube/contentmanagement/graphtools/plotting/graphs/TimeSeriesGraph.java deleted file mode 100644 index db47b0e..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/TimeSeriesGraph.java +++ /dev/null @@ -1,186 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * =========================================================== - * - * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. - * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------ - * PieGraph.java - * ------------------ - * (C) Copyright 2003-2007, by Object Refinery Limited and Contributors. 
- * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): ; - * - * Changes - * ------- - * 09-Mar-2005 : Version 1, copied from the demo collection that ships with - * the JFreeChart Developer Guide (DG); - * - */ - -package org.gcube.contentmanagement.graphtools.plotting.graphs; - -import java.awt.Color; -import java.sql.Date; -import java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.List; - -import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph; -import org.gcube.contentmanagement.graphtools.utils.DateGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphData; -import org.gcube.portlets.user.timeseries.charts.support.types.Point; -import org.jfree.chart.ChartFactory; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.axis.DateAxis; -import org.jfree.chart.plot.XYPlot; -import org.jfree.chart.renderer.xy.XYItemRenderer; -import org.jfree.chart.renderer.xy.XYLineAndShapeRenderer; -import org.jfree.data.general.Dataset; -import org.jfree.data.time.Day; -import org.jfree.data.time.Minute; -import org.jfree.data.time.TimeSeries; -import org.jfree.data.time.TimeSeriesCollection; -import org.jfree.data.xy.XYDataset; -import org.jfree.ui.RectangleInsets; - -public class TimeSeriesGraph extends GenericStandaloneGraph { - - private static final long serialVersionUID = 1L; - private static final String formatYear= "yyyy"; - private static final String formatMonth= "MM-yyyy"; - private static final String formatDay= "MM-dd-yyyy"; - private String timeseriesformat; - - - public TimeSeriesGraph(String title) { - super(title); - } - - protected Dataset generateDataset() { - TimeSeriesCollection dataset = new TimeSeriesCollection(); - - final TimeSeries s1 = new TimeSeries("Series 1"); - s1.add(new Minute(0, 0, 7, 12, 2003), 1.2); - s1.add(new Minute(30, 12, 7, 12, 2003), 3.0); - s1.add(new 
Minute(15, 14, 7, 12, 2003), 8.0); - - final TimeSeries s2 = new TimeSeries("Series 2"); - s2.add(new Minute(0, 3, 7, 12, 2003), 0.0); - s2.add(new Minute(30, 9, 7, 12, 2003), 0.0); - s2.add(new Minute(15, 10, 7, 12, 2003), 0.0); - - dataset.addSeries(s1); - dataset.addSeries(s2); - - return dataset; - } - - protected JFreeChart createChart(Dataset dataset) { - - - JFreeChart chart = ChartFactory.createTimeSeriesChart( - "Time Series", // title - "", // x-axis label - "", // y-axis label - (XYDataset)dataset, // data - true, // create legend? - true, // generate tooltips? - false // generate URLs? - ); - - chart.setBackgroundPaint(Color.white); - - XYPlot plot = (XYPlot) chart.getPlot(); - plot.setBackgroundPaint(Color.lightGray); - plot.setDomainGridlinePaint(Color.white); - plot.setRangeGridlinePaint(Color.white); - plot.setAxisOffset(new RectangleInsets(5.0, 5.0, 5.0, 5.0)); - plot.setDomainCrosshairVisible(true); - plot.setRangeCrosshairVisible(true); - - XYItemRenderer r = plot.getRenderer(); - if (r instanceof XYLineAndShapeRenderer) { - XYLineAndShapeRenderer renderer = (XYLineAndShapeRenderer) r; - renderer.setBaseShapesVisible(true); - renderer.setBaseShapesFilled(true); - renderer.setDrawSeriesLineAsPath(true); - } - - DateAxis axis = (DateAxis) plot.getDomainAxis(); - - axis.setDateFormatOverride(new SimpleDateFormat(timeseriesformat)); - - return chart; - } - - @Override - protected Dataset convert2Dataset(GraphData st) { - - List> pointslist = st.getData(); - - // NOTE: after the graph generation graphs are inverted in x and y - int numbOfRows = pointslist.size(); - TimeSeriesCollection timeseriescollection = new TimeSeriesCollection(); - - if (numbOfRows > 0) { - int numbOfCols = pointslist.get(0).getEntries().size(); - // calclulation will be made only for the first series - - for (int x = 0; x < numbOfRows; x++) { - - String serieslabel = pointslist.get(x).getLabel(); - TimeSeries xyseries = new TimeSeries(serieslabel); - - for (int y = 0; y < 
numbOfCols; y++) { - String xlabel = pointslist.get(x).getEntries().get(y).getLabel(); - double value = pointslist.get(x).getEntries().get(y).getValue().doubleValue(); - Calendar cal = DateGuesser.convertDate(xlabel); - String granularity = DateGuesser.granularity(xlabel); - - if (granularity.equals(DateGuesser.YEAR)) timeseriesformat = formatYear; - else if (granularity.equals(DateGuesser.MONTH)) timeseriesformat = formatMonth; - else if (granularity.equals(DateGuesser.DAY)) timeseriesformat = formatDay; - - AnalysisLogger.getLogger().debug("TimeSeriesGraph-> granularity "+granularity+" format "+timeseriesformat); - - xyseries.add(new Day(new Date(cal.getTimeInMillis())),value); - - } - - timeseriescollection.addSeries(xyseries); - } - } - return timeseriescollection; - } - - @Override - protected GenericStandaloneGraph getInstance(String title) { - TimeSeriesGraph tsg = new TimeSeriesGraph(title); - tsg.timeseriesformat = timeseriesformat; - return tsg; - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/TransectLineGraph.java b/src/org/gcube/contentmanagement/graphtools/plotting/graphs/TransectLineGraph.java deleted file mode 100644 index fc74931..0000000 --- a/src/org/gcube/contentmanagement/graphtools/plotting/graphs/TransectLineGraph.java +++ /dev/null @@ -1,293 +0,0 @@ -/* =========================================================== - * JFreeChart : a free chart library for the Java(tm) platform - * =========================================================== - * - * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors. - * - * Project Info: http://www.jfree.org/jfreechart/index.html - * - * This library is free software; you can redistribute it and/or modify it - * under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation; either version 2.1 of the License, or - * (at your option) any later version. 
- * - * This library is distributed in the hope that it will be useful, but - * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY - * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public - * License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, - * USA. - * - * [Java is a trademark or registered trademark of Sun Microsystems, Inc. - * in the United States and other countries.] - * - * ------------------ - * PieGraph.java - * ------------------ - * (C) Copyright 2003-2007, by Object Refinery Limited and Contributors. - * - * Original Author: David Gilbert (for Object Refinery Limited); - * Contributor(s): ; - * - * Changes - * ------- - * 09-Mar-2005 : Version 1, copied from the demo collection that ships with - * the JFreeChart Developer Guide (DG); - * - */ - -package org.gcube.contentmanagement.graphtools.plotting.graphs; - -import java.awt.Color; -import java.awt.Font; -import java.awt.Graphics2D; -import java.awt.geom.Rectangle2D; -import java.util.ArrayList; -import java.util.List; - -import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph; -import org.gcube.contentmanagement.graphtools.utils.MathFunctions; -import org.jfree.chart.ChartFactory; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.axis.AxisLocation; -import org.jfree.chart.axis.AxisState; -import org.jfree.chart.axis.CategoryAxis; -import org.jfree.chart.axis.CategoryLabelPositions; -import org.jfree.chart.axis.CategoryTick; -import org.jfree.chart.plot.CategoryPlot; -import org.jfree.chart.plot.PlotOrientation; -import org.jfree.chart.renderer.category.LineAndShapeRenderer; -import org.jfree.chart.title.TextTitle; -import org.jfree.data.category.DefaultCategoryDataset; -import org.jfree.data.general.Dataset; -import 
org.jfree.text.TextBlock; -import org.jfree.text.TextBlockAnchor; -import org.jfree.text.TextLine; -import org.jfree.ui.RectangleEdge; -import org.jfree.ui.RectangleInsets; -import org.jfree.ui.TextAnchor; - -public class TransectLineGraph extends GenericStandaloneGraph { - - private static final long serialVersionUID = 1L; - - public TransectLineGraph(String title) { - super(title); - } - - protected Dataset generateDataset() { - // row keys... - String series1 = "First"; - String series2 = "Second"; - String series3 = "Third"; - - // column keys... - String type1 = "Type 1"; - String type2 = "Type 2"; - String type3 = "Type 3"; - String type4 = "Type 4"; - String type5 = "Type 5"; - String type6 = "Type 6"; - String type7 = "Type 7"; - String type8 = "Type 8"; - - // create the dataset... - DefaultCategoryDataset dataset = new DefaultCategoryDataset(); - - dataset.addValue(1.0, series1, type1); - dataset.addValue(4.0, series1, type2); - dataset.addValue(3.0, series1, type3); - dataset.addValue(5.0, series1, type4); - dataset.addValue(5.0, series1, type5); - dataset.addValue(7.0, series1, type6); - dataset.addValue(7.0, series1, type7); - dataset.addValue(8.0, series1, type8); - - dataset.addValue(5.0, series2, type1); - dataset.addValue(7.0, series2, type2); - dataset.addValue(6.0, series2, type3); - dataset.addValue(8.0, series2, type4); - dataset.addValue(4.0, series2, type5); - dataset.addValue(4.0, series2, type6); - dataset.addValue(2.0, series2, type7); - dataset.addValue(1.0, series2, type8); - - dataset.addValue(4.0, series3, type1); - dataset.addValue(3.0, series3, type2); - dataset.addValue(2.0, series3, type3); - dataset.addValue(3.0, series3, type4); - dataset.addValue(6.0, series3, type5); - dataset.addValue(3.0, series3, type6); - dataset.addValue(4.0, series3, type7); - dataset.addValue(3.0, series3, type8); - return dataset; - } - - protected JFreeChart createChart(Dataset dataset) { - - DefaultCategoryDataset dataset1 = new 
DefaultCategoryDataset(); - DefaultCategoryDataset dataset2 = new DefaultCategoryDataset(); - DefaultCategoryDataset dataset0 = (DefaultCategoryDataset)dataset; - ArrayList relevantindexes = new ArrayList(); - - for (Object row:dataset0.getRowKeys()){ - int spikescounter=0; - int pointscounter=0; - int allcounter=0; - int mincolumns = 0; - int maxcolumns = dataset0.getColumnCount()-1; - int medcolumns = (maxcolumns)/2; - for (Object column:dataset0.getColumnKeys()){ -// System.out.println("row "+row+" column "+column ); - double value = dataset0.getValue((String)row, (String)column).doubleValue(); - String xlab = (String) column; - String annotation =""; - - String x1lab = xlab; - int commaindex = xlab.indexOf(";"); - if (commaindex>0){ - annotation = xlab.substring(commaindex+1); - x1lab = xlab.substring(0,commaindex); - dataset2.addValue(value, (String)row, ""+(allcounter+1)+": "+annotation); - spikescounter++; - relevantindexes.add(allcounter); - } - - else{ - if ((allcounter==mincolumns)||(allcounter==maxcolumns)||(allcounter==medcolumns)) - relevantindexes.add(allcounter); - - dataset2.addValue(value, (String)row, ""+(allcounter+1)+""); - pointscounter++; - } - allcounter++; - dataset1.addValue(value, (String)row, x1lab); - } - } - - - - // create the chart... 
- - JFreeChart chart = ChartFactory.createLineChart( - " ", // chart title - "", // domain axis label - "", // range axis label - (DefaultCategoryDataset)dataset1, // data - PlotOrientation.VERTICAL, // orientation - false, // include legend - true, // tooltips - false // urls - ); - - chart.setTitle(new TextTitle(" ", new Font("sansserif", Font.BOLD, 60))); - - chart.setBackgroundPaint(Color.white); - - CategoryPlot plot = chart.getCategoryPlot(); - plot.setBackgroundPaint(Color.white); - plot.setRangeGridlinePaint(Color.white); - - plot.setDomainCrosshairVisible(true); - plot.setDomainGridlinesVisible(true); - plot.setRangeCrosshairVisible(true); -// plot.setRenderer(new LineAndShapeRenderer(true,true)); - plot.setRenderer(new LineAndShapeRenderer(true,false)); - plot.setAxisOffset(new RectangleInsets(1D, 1D, 1D, 1D)); - - - plot.setDomainAxis(0,new CustomXAxis("",dataset1,relevantindexes)); - CategoryAxis categoryaxis1 = plot.getDomainAxis(0); - categoryaxis1.setCategoryLabelPositions(CategoryLabelPositions.UP_45); - plot.mapDatasetToDomainAxis(0, 0); - - plot.setDataset(1, (DefaultCategoryDataset)dataset2); - plot.setDomainAxis(1,new CustomXAxis("",dataset2,relevantindexes)); - CategoryAxis categoryaxis2 = plot.getDomainAxis(1); - categoryaxis2.setCategoryLabelPositions(CategoryLabelPositions.UP_45); - plot.mapDatasetToDomainAxis(1, 1); - plot.setDomainAxisLocation(1, AxisLocation.TOP_OR_LEFT); - - -// categoryaxis2.setLabelInsets(new RectangleInsets(100, 100, 100, 100)); - -// categoryaxis2.setLowerMargin(0.05D); -// categoryaxis2.setUpperMargin(1D); - -// plot.mapDatasetToRangeAxis(1, 1); - //deprecated - /* - LineAndShapeRenderer renderer = (LineAndShapeRenderer) plot.getRenderer(); - renderer.setShapesVisible(true); - renderer.setDrawOutlines(true); - renderer.setUseFillPaint(true); - renderer.setFillPaint(Color.white); -*/ - - -// rangeAxis.setStandardTickUnits(ValueAxis); -// rangeAxis.setAutoRangeIncludesZero(false); -// rangeAxis.setUpperMargin(0.12); - 
- chart.setPadding(new RectangleInsets(30, 30, 90, 90)); - - big=true; - chart.getPlot().setBackgroundPaint(Color.white); - - return chart; - } - - @Override - protected GenericStandaloneGraph getInstance(String title) { - - return new TransectLineGraph(title); - } - - - - static class CustomXAxis extends CategoryAxis - { - - DefaultCategoryDataset dataset; - List samplingindexes; - public java.util.List refreshTicks(Graphics2D graphics2d, AxisState axisstate, Rectangle2D rectangle2d, RectangleEdge rectangleedge) - { - ArrayList arraylist = new ArrayList(); - int size = dataset.getColumnCount(); - - - - for (int i=0;i indexes) - { - super(s); - dataset = d; - samplingindexes = indexes; - } - } -} diff --git a/src/org/gcube/contentmanagement/graphtools/tests/RegressionTestAllGraphs.java b/src/org/gcube/contentmanagement/graphtools/tests/RegressionTestAllGraphs.java deleted file mode 100644 index 8dcaf1f..0000000 --- a/src/org/gcube/contentmanagement/graphtools/tests/RegressionTestAllGraphs.java +++ /dev/null @@ -1,64 +0,0 @@ -package org.gcube.contentmanagement.graphtools.tests; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.GaussianDistributionGraph; -import org.gcube.contentmanagement.graphtools.plotting.graphs.HistogramGraph; -import org.gcube.contentmanagement.graphtools.plotting.graphs.LineGraph; -import org.gcube.contentmanagement.graphtools.plotting.graphs.PieGraph; -import org.gcube.contentmanagement.graphtools.plotting.graphs.RadarGraph; -import org.gcube.contentmanagement.graphtools.plotting.graphs.ScatterGraphGeneric; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - - -public class RegressionTestAllGraphs { - - - public static void main(String[] args) throws Exception{ - - String table = "rdm7d053300d89e11e087918065b36ddd05"; - String xDimension 
= "field3"; - String yDimension = "field5"; - String groupDimension = "field2"; - String linesColumn = "field4"; - String filter1 = "Perciformes"; - String filter2 = "Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - - //database Parameters - conf.setDatabaseURL("jdbc:postgresql://localhost/testdb"); - conf.setDatabaseUserName("gcube"); - conf.setDatabasePassword("d4science2"); - conf.setDatabaseDriver("org.postgresql.Driver"); - conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect"); - - stg.init("./cfg/",conf); - -// stg.addColumnFilter("field4", "F","="); - - //String generation - GraphGroups gg = stg.generateGraphs(100, table, xDimension, yDimension, groupDimension, linesColumn, filter1, filter2); - //graph plot - RadarGraph series = new RadarGraph(""); - series.renderGraphGroup(gg); - - HistogramGraph series2 = new HistogramGraph(""); - series2.renderGraphGroup(gg); - - LineGraph series3 = new LineGraph(""); - series3.renderGraphGroup(gg); - - PieGraph series4 = new PieGraph(""); - series4.renderGraphGroup(gg); - - GaussianDistributionGraph series5 = new GaussianDistributionGraph(""); - series5.renderGraphGroup(gg); - - ScatterGraphGeneric series6 = new ScatterGraphGeneric(""); - series6.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/tests/old/ExampleDerivative.java b/src/org/gcube/contentmanagement/graphtools/tests/old/ExampleDerivative.java deleted file mode 100644 index d2695b9..0000000 --- a/src/org/gcube/contentmanagement/graphtools/tests/old/ExampleDerivative.java +++ /dev/null @@ -1,40 +0,0 @@ -package org.gcube.contentmanagement.graphtools.tests.old; - - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.data.BigSamplesTable; -import org.gcube.contentmanagement.graphtools.plotting.graphs.TimeSeriesGraph; -import 
org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - -import com.rapidminer.RapidMiner; -import com.rapidminer.example.ExampleSet; -import com.rapidminer.operator.preprocessing.sampling.AbsoluteSampling; -import com.rapidminer.tools.OperatorService; -import com.rapidminer.tools.math.MathFunctions; - -public class ExampleDerivative { - - - public static void main(String[] args) throws Exception{ - - String table = "ts_a904da30_b4fc_11df_800d_bcef80d51986"; - String xDimension = "field1"; - String yDimension = "field4"; - String groupDimension = "field2"; - String speciesColumn = "field3"; - String filter1 = "Toluene"; -// String filter2 = "Osteichthyes"; - StatisticsGenerator stg = new StatisticsGenerator(); - - stg.init("./cfg/"); - - GraphGroups gg = stg.generateGraphs(200, table, xDimension, yDimension, groupDimension, speciesColumn, filter1); - - TimeSeriesGraph series = new TimeSeriesGraph(""); - series.renderGraphGroup(gg); - - } - - - -} diff --git a/src/org/gcube/contentmanagement/graphtools/tests/old/ExampleMathFunctions.java b/src/org/gcube/contentmanagement/graphtools/tests/old/ExampleMathFunctions.java deleted file mode 100644 index 2643b1b..0000000 --- a/src/org/gcube/contentmanagement/graphtools/tests/old/ExampleMathFunctions.java +++ /dev/null @@ -1,79 +0,0 @@ -package org.gcube.contentmanagement.graphtools.tests.old; - - -import org.gcube.contentmanagement.graphtools.data.BigSamplesTable; - -import com.rapidminer.RapidMiner; -import com.rapidminer.example.ExampleSet; -import com.rapidminer.operator.preprocessing.sampling.AbsoluteSampling; -import com.rapidminer.tools.OperatorService; -import com.rapidminer.tools.math.MathFunctions; - -public class ExampleMathFunctions { - - - public static void main(String[] args) throws Exception{ - - double variance = MathFunctions.variance(new double[] { 0.1, 0.1, 0.0, -0.1 }, Double.NEGATIVE_INFINITY); - double covariance = MathFunctions.correlation(new double[] { 0.1, 0.2, -0.3, 0.0 }, new 
double[] { 0.0, 0.1, 0.1, -0.1 }); - double meanc = mean(new double[] { 0.1, 0.1, 0.0, -0.1 }); - -// String pluginDirString = new File("C:/Users/coro/Desktop/WorkFolder/Workspace/RapidMinerPlugins/TextProcessing/Vega/lib/").getAbsolutePath(); -// System.setProperty(RapidMiner.PROPERTY_RAPIDMINER_INIT_PLUGINS_LOCATION, pluginDirString); - System.setProperty("rapidminer.init.operators","C:/Users/coro/Desktop/WorkFolder/Workspace/RapidMiner_Wasat/resources/operators.xml"); - - RapidMiner.init (); - - BigSamplesTable bst = new BigSamplesTable(); - bst.addSampleRow("prova 1", 10, 12,13,14,15); - bst.addSampleRow("prova 2", 20, 15,14,15); - bst.addSampleRow("prova 3", 30, 11,110,150); - System.out.println(bst.toString()); - -// OperatorService.createOperator(ExampleSource.class); -// as.apply(bst.generateExampleSet()); - -// OperatorChain wvtoolOperator = (OperatorChain) OperatorService.createOperator(SingleTextInput.class); - - -// Learner learner = (Learner)OperatorService .createOperator("J48"); - - AbsoluteSampling as = (AbsoluteSampling)OperatorService .createOperator("AbsoluteSampling"); -// SimpleSampling ss = (SimpleSampling)OperatorService .createOperator("Sampling"); -// ss.setParameter("sample_ratio", "0.8"); - as.setParameter("sample_size", "2"); - as.setParameter("local_random_seed", "-1"); - - ExampleSet exampleSet = bst.generateExampleSet(); - System.out.println(exampleSet.toResultString()); - - -// ExampleSet exampleSetOut = ss.apply(exampleSet); - ExampleSet exampleSetOut = as.apply(exampleSet); - System.out.println(exampleSetOut.toResultString()); - - BigSamplesTable bstOut = new BigSamplesTable(); - bstOut.generateSampleTable(exampleSetOut); - System.out.println(bstOut.toString()); - -// as.apply(es); - - - System.out.println("variance "+variance+" covariance "+covariance+" mean "+meanc); - - - - } - - - //================================================= mean - public static double mean(double[] p) { - double sum = 0; // sum of all the elements - for 
(int i=0; i attributes = new LinkedList(); - for (int a = 0; a < getMyNumOfAttributes(); a++) { - attributes.add(AttributeFactory.createAttribute("att" + a, Ontology.REAL)); - } - Attribute label = AttributeFactory.createAttribute(" label ", Ontology.NOMINAL); - attributes.add(label); - // create table - MemoryExampleTable table = new MemoryExampleTable(attributes); - // ll table (here : only real values ) - for (int d = 0; d < getMyNumOfDataRows(); d++) { - double[] data = new double[attributes.size()]; - for (int a = 0; a < getMyNumOfAttributes(); a++) { - // all with proper data here - data[a] = getMyValue(d, a); - } - // maps the nominal classi cation to a double value - data[data.length - 1] = label.getMapping().mapString(getMyClassification(d)); - // add data row - table.addDataRow(new DoubleArrayDataRow(data)); - } - // create example set - ExampleSet exampleSet = table.createExampleSet(label); - } - - - //to be defined in future applications - private static int getMyNumOfAttributes() { - return 3; - } - - private static int getMyNumOfDataRows() { - return 10; - } - - private static double getMyValue(int d, int a) { - return 10; - } - - private static String getMyClassification(int d) { - return "ciao"; - } -} diff --git a/src/org/gcube/contentmanagement/graphtools/tests/old/TestConverters.java b/src/org/gcube/contentmanagement/graphtools/tests/old/TestConverters.java deleted file mode 100644 index 9d4550f..0000000 --- a/src/org/gcube/contentmanagement/graphtools/tests/old/TestConverters.java +++ /dev/null @@ -1,70 +0,0 @@ -package org.gcube.contentmanagement.graphtools.tests.old; - -import java.util.List; - -import org.gcube.contentmanagement.graphtools.data.BigSamplesTable; -import org.gcube.contentmanagement.graphtools.data.conversions.GraphConverter2D; -import org.gcube.portlets.user.timeseries.charts.support.types.Point; - - -public class TestConverters { - - - public static void main1(String[] args) throws Exception{ - BigSamplesTable bst = new 
BigSamplesTable(); - - bst.addSampleRow("prova 1", 10, 15); - bst.addSampleRow("prova 2", 10, 12); - bst.addSampleRow("prova 3", 30, 11); - bst.addSampleRow("prova 1", 10, 15); - bst.addSampleRow("prova 2", 10, 12); - bst.addSampleRow("prova 3", 30, 11); - bst.addSampleRow("prova 1", 10, 15); - bst.addSampleRow("prova 2", 10, 12); - bst.addSampleRow("prova 3", 30, 11); - bst.addSampleRow("prova 1", 10, 15); - bst.addSampleRow("prova 2", 10, 12); - bst.addSampleRow("prova 3", 30, 11); - bst.addSampleRow("prova 1", 10, 15); - bst.addSampleRow("prova 2", 10, 12); - bst.addSampleRow("prova 3", 30, 11); - bst.addSampleRow("prova 1", 10, 15); - bst.addSampleRow("prova 2", 10, 12); - bst.addSampleRow("prova 3", 30, 11); - bst.addSampleRow("prova 1", 10, 15); - bst.addSampleRow("prova 2", 10, 12); - bst.addSampleRow("prova 3", 30, 11); - - - System.out.println(bst.toString()); - - - List> graphicus = GraphConverter2D.convert(bst); - graphicus = GraphConverter2D.reduceDimension(graphicus); - - - System.out.println("finished!"); - - } - - - public static void main(String[] args) throws Exception{ - BigSamplesTable bst = new BigSamplesTable(); - - bst.addSampleRow("1;prova y1", 0, 0); - bst.addSampleRow("1;prova y2", 0, 0); - bst.addSampleRow("1;prova y3", 0, 0); - bst.addSampleRow("prova 1;prova y1", 10, 15); - bst.addSampleRow("prova 2;prova y2", 10, 12); - bst.addSampleRow("prova 3;prova y2", 30, 11); - bst.addSampleRow("prova 3;prova y1", 30, 10); - - System.out.println(bst.toString()); - - List> graphicus = GraphConverter2D.convert(bst); - graphicus = GraphConverter2D.reduceDimension(graphicus); - - System.out.println("finished!"); - - } -} diff --git a/src/org/gcube/contentmanagement/graphtools/tests/old/TestDBExtractor.java b/src/org/gcube/contentmanagement/graphtools/tests/old/TestDBExtractor.java deleted file mode 100644 index f36ba0e..0000000 --- a/src/org/gcube/contentmanagement/graphtools/tests/old/TestDBExtractor.java +++ /dev/null @@ -1,55 +0,0 @@ -package 
org.gcube.contentmanagement.graphtools.tests.old; - -import java.util.List; -import java.util.Map; - -import org.gcube.contentmanagement.graphtools.abstracts.SamplesTable; -import org.gcube.contentmanagement.graphtools.data.conversions.GraphConverter2D; -import org.gcube.contentmanagement.graphtools.data.databases.CommonDBExtractor; -import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphData; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; -import org.gcube.portlets.user.timeseries.charts.support.types.Point; -import org.hibernate.SessionFactory; - - -public class TestDBExtractor { - - - - public static void main(String[] args) throws Exception{ - - - SessionFactory referenceDBSession = DatabaseFactory.initDBConnection("./hibernate.cfg.xml"); - - CommonDBExtractor extractor = new CommonDBExtractor(referenceDBSession); - - String table = "ts_161efa00_2c32_11df_b8b3_aa10916debe6"; - String xDimension = "field5"; - String groupDimension = "field1"; - String yValue = "field6"; - String speciesColumn = "field3"; - String filter1 = "Brown seaweeds"; - String filter2 = "River eels"; - - Map samplesMap = extractor.getMultiDimTemporalTables(table, xDimension, groupDimension, yValue, speciesColumn, filter1, filter2); - - System.out.println("MAP EXTRACTED : \n"+samplesMap.toString()); - - GraphGroups graphgroups = new GraphGroups(); - - - for (String key:samplesMap.keySet()){ - - SamplesTable stable = samplesMap.get(key); - List> singlegraph = GraphConverter2D.transformTable(stable); - - GraphData grd = new GraphData(singlegraph,true); - graphgroups.addGraph("Distribution for "+key, grd); - - } - - System.out.println("finished!"); - - } -} diff --git a/src/org/gcube/contentmanagement/graphtools/tests/old/TextTest.java b/src/org/gcube/contentmanagement/graphtools/tests/old/TextTest.java deleted file mode 100644 index 80dbeb4..0000000 --- 
a/src/org/gcube/contentmanagement/graphtools/tests/old/TextTest.java +++ /dev/null @@ -1,40 +0,0 @@ -package org.gcube.contentmanagement.graphtools.tests.old; - -import java.io.File; - -import com.rapidminer.RapidMiner; -import com.rapidminer.operator.IOContainer; -import com.rapidminer.operator.Model; -import com.rapidminer.operator.ModelApplier; -import com.rapidminer.operator.Operator; -import com.rapidminer.operator.OperatorChain; -import com.rapidminer.operator.io.ModelLoader; -import com.rapidminer.tools.OperatorService; - -public class TextTest { - - public static void main(String[] argv) throws Exception { - - String pluginDirString = new File("C:\\Dokumente und Einstellungen\\Mierswa\\Eigene Dateien\\workspace\\RMTextTest\\lib").getAbsolutePath(); - System.setProperty(RapidMiner.PROPERTY_RAPIDMINER_INIT_PLUGINS_LOCATION, pluginDirString); - - File wordListFile = new File("."); - File modelFile = new File("."); - - RapidMiner.init(); - /* - OperatorChain wvtoolOperator = (OperatorChain) OperatorService.createOperator(SingleTextInput.class); - wvtoolOperator.setParameter("input_word_list", wordListFile.getAbsolutePath()); - - wvtoolOperator.addOperator(OperatorService.createOperator(SimpleTokenizer.class)); - wvtoolOperator.addOperator(OperatorService.createOperator(PorterStemmer.class)); - - Operator modelApplier = OperatorService.createOperator(ModelApplier.class); - Operator modelLoader = OperatorService.createOperator(ModelLoader.class); - modelLoader.setParameter(ModelLoader.PARAMETER_MODEL_FILE, modelFile.getAbsolutePath()); - IOContainer container = modelLoader.apply(new IOContainer()); - - Model model = container.get(Model.class); - */ - } -} diff --git a/src/org/gcube/contentmanagement/graphtools/tests/show/ExampleStringGraphData.java b/src/org/gcube/contentmanagement/graphtools/tests/show/ExampleStringGraphData.java deleted file mode 100644 index eb8e0ff..0000000 --- 
a/src/org/gcube/contentmanagement/graphtools/tests/show/ExampleStringGraphData.java +++ /dev/null @@ -1,61 +0,0 @@ -package org.gcube.contentmanagement.graphtools.tests.show; - -import org.gcube.contentmanagement.graphtools.core.StatisticsGenerator; -import org.gcube.contentmanagement.graphtools.plotting.graphs.GaussianDistributionGraph; -import org.gcube.contentmanagement.graphtools.plotting.graphs.HistogramGraph; -import org.gcube.contentmanagement.graphtools.plotting.graphs.LineGraph; -import org.gcube.contentmanagement.graphtools.plotting.graphs.PieGraph; -import org.gcube.contentmanagement.graphtools.plotting.graphs.RadarGraph; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.portlets.user.timeseries.charts.support.types.GraphGroups; - - -public class ExampleStringGraphData { - - - public static void main(String[] args) throws Exception{ - String table = "ts_3637f670_430c_11df_a0a2_909e7d074592"; - String xDimension = "field5"; - String yDimension = "field6"; - String groupDimension = "field1"; - String speciesColumn = "field3"; - String filter1 = "Crabs, sea-spiders"; - String filter2 = "Marine fishes not identified"; - StatisticsGenerator stg = new StatisticsGenerator(); - - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - //database Parameters - conf.setDatabaseUserName("root"); -// conf.setDatabasePassword("password"); - conf.setDatabaseDriver("com.mysql.jdbc.Driver"); - conf.setDatabaseURL("jdbc:mysql://localhost/timeseries"); - conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect"); - conf.setDatabaseAutomaticTestTable("connectiontesttable"); - conf.setDatabaseIdleConnectionTestPeriod("3600"); - - - stg.init("./cfg/",conf); - - stg.addColumnFilter("field4", "F","="); - - //String generation - GraphGroups gg = stg.generateGraphs(100, table, xDimension, yDimension, groupDimension, speciesColumn, filter1, filter2); - //graph plot - RadarGraph series = new 
RadarGraph(""); - series.renderGraphGroup(gg); - - HistogramGraph series2 = new HistogramGraph(""); - series2.renderGraphGroup(gg); - - LineGraph series3 = new LineGraph(""); - series3.renderGraphGroup(gg); - - PieGraph series4 = new PieGraph(""); - series4.renderGraphGroup(gg); - - GaussianDistributionGraph series5 = new GaussianDistributionGraph(""); - series5.renderGraphGroup(gg); - } - -} diff --git a/src/org/gcube/contentmanagement/graphtools/tests/testConnections.java b/src/org/gcube/contentmanagement/graphtools/tests/testConnections.java deleted file mode 100644 index 47c8aa4..0000000 --- a/src/org/gcube/contentmanagement/graphtools/tests/testConnections.java +++ /dev/null @@ -1,44 +0,0 @@ -package org.gcube.contentmanagement.graphtools.tests; - -import java.util.List; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory; -import org.hibernate.SessionFactory; - -public class testConnections { - - - public static void main(String args[]) throws Exception{ - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - - conf.setDatabaseURL("jdbc:postgresql://localhost/testdb"); - conf.setDatabaseUserName("gcube"); - conf.setDatabasePassword("d4science2"); - conf.setDatabaseDriver("org.postgresql.Driver"); - conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect"); - String hibernateDefaultFile = "hibernate.cfg.xml"; - String loggerDefaultFile = "ALog.properties"; - - String configPath = "./cfg/"; - - AnalysisLogger.setLogger(configPath+loggerDefaultFile); - - SessionFactory session = DatabaseFactory.initDBConnection(configPath+hibernateDefaultFile,conf); - List resultSet = DatabaseFactory.executeSQLQuery("select * from hcaf_s limit 10", session); - - for (Object result:resultSet){ - Object [] row = (Object[]) result; - for (int i=0;i 0) { - urlStr += "?" 
+ requestParameters; - } - URL url = new URL(urlStr); - URLConnection conn = url.openConnection(); - - // Get the response - BufferedReader rd = new BufferedReader(new InputStreamReader(conn.getInputStream())); - StringBuffer sb = new StringBuffer(); - String line; - while ((line = rd.readLine()) != null) { - sb.append(line); - } - rd.close(); - result = sb.toString(); - } catch (Exception e) { - e.printStackTrace(); - } - } - return result; - } - - /** - * Reads data from the data reader and posts it to a server via POST request. data - The data you want to send endpoint - The server's address output - writes the server's response to output - * - * @throws Exception - */ - public static void postData(Reader data, URL endpoint, Writer output) throws Exception { - HttpURLConnection urlc = null; - try { - urlc = (HttpURLConnection) endpoint.openConnection(); - try { - urlc.setRequestMethod("POST"); - } catch (ProtocolException e) { - throw new Exception("Shouldn't happen: HttpURLConnection doesn't support POST??", e); - } - urlc.setDoOutput(true); - urlc.setDoInput(true); - urlc.setUseCaches(false); - urlc.setAllowUserInteraction(false); - urlc.setRequestProperty("Content-type", "text/xml; charset=" + "UTF-8"); - - OutputStream out = urlc.getOutputStream(); - - try { - Writer writer = new OutputStreamWriter(out, "UTF-8"); - pipe(data, writer); - writer.close(); - } catch (IOException e) { - throw new Exception("IOException while posting data", e); - } finally { - if (out != null) - out.close(); - } - - InputStream in = urlc.getInputStream(); - try { - Reader reader = new InputStreamReader(in); - pipe(reader, output); - reader.close(); - } catch (IOException e) { - throw new Exception("IOException while reading response", e); - } finally { - if (in != null) - in.close(); - } - - } catch (IOException e) { - throw new Exception("Connection error (is server running at " + endpoint + " ?): " + e); - } finally { - if (urlc != null) - urlc.disconnect(); - } - } - - // 
performs a simple Get from a remote url - public static Object getJSonData(String endpoint, String requestParameters, Type outputClass) throws Exception { - String output = sendGetRequest(endpoint, requestParameters); - Gson gson = new Gson(); - // AnalysisLogger.getLogger().debug("HttpRequest-> OUTPUT JSON:\n"+output.toString()); - // Output the response - Object rebuiltJson = gson.fromJson(output.toString(), outputClass); - return rebuiltJson; - } - - // performs a simple transformation to a json object - public static String toJSon(Object obj) { - Gson gson = new Gson(); - String jsonString = gson.toJson(obj); - return jsonString; - } - - public static Object postJSonData(String endpoint, Object obj, Type outputClass) throws Exception { - - HttpURLConnection urlc = null; - try { - - // Send the request - URL url = new URL(endpoint); - urlc = (HttpURLConnection) url.openConnection(); - try { - urlc.setRequestMethod("POST"); - } catch (ProtocolException e) { - throw new Exception("Error in HttpURLConnection", e); - } - urlc.setDoOutput(true); - urlc.setDoInput(true); - urlc.setUseCaches(false); - urlc.setAllowUserInteraction(false); - urlc.setRequestProperty("Content-type", "application/json; charset=" + "UTF-8"); - OutputStreamWriter writer = null; - Gson gson = new Gson(); - - if (obj != null) { - OutputStream out = urlc.getOutputStream(); - writer = new OutputStreamWriter(out); - // write parameters - String jsonString = gson.toJson(obj); - AnalysisLogger.getLogger().trace("INPUT JSON:\n" + jsonString); - writer.write(jsonString); - writer.flush(); - } - - // Get the response - StringBuffer answer = new StringBuffer(); - BufferedReader reader = new BufferedReader(new InputStreamReader(urlc.getInputStream())); - String line; - while ((line = reader.readLine()) != null) { - answer.append(line); - } - - if (obj != null) - writer.close(); - - reader.close(); - // AnalysisLogger.getLogger().debug("OUTPUT JSON:\n"+answer.toString()); - // Output the response - Object 
rebuiltJson = gson.fromJson(answer.toString(), outputClass); - return rebuiltJson; - - } catch (Exception ex) { - ex.printStackTrace(); - } - return null; - } - - /** - * Pipes everything from the reader to the writer via a buffer - */ - private static void pipe(Reader reader, Writer writer) throws IOException { - char[] buf = new char[1024]; - int read = 0; - while ((read = reader.read(buf)) >= 0) { - writer.write(buf, 0, read); - } - writer.flush(); - } - - public static String sendPostRequest(String endpoint, String requestParameters) { - - // Build parameter string - String data = requestParameters; - try { - - // Send the request - URL url = new URL(endpoint); - URLConnection conn = url.openConnection(); - - conn.setDoOutput(true); - OutputStreamWriter writer = new OutputStreamWriter(conn.getOutputStream()); - - // write parameters - writer.write(data); - writer.flush(); - - // Get the response - StringBuffer answer = new StringBuffer(); - BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream())); - String line; - while ((line = reader.readLine()) != null) { - answer.append(line); - } - writer.close(); - reader.close(); - - // Output the response - return answer.toString(); - - } catch (MalformedURLException ex) { - ex.printStackTrace(); - } catch (IOException ex) { - ex.printStackTrace(); - } - return null; - } - - public static String ManageCDATA(String phrase) { - - return phrase.replace("", ""); - - } - - public static String AddCDATA(String phrase) { - - return ""; - - } - - public static int checkUrl(String url, final String username, final String password) { - int checkConn = -1; - try { - if ((username != null) && (password != null)) { - Authenticator.setDefault(new Authenticator() { - @Override - protected PasswordAuthentication getPasswordAuthentication() { - return new PasswordAuthentication(username, password.toCharArray()); - } - - }); - } - - URL checkurl = new URL(url); - HttpURLConnection conn = 
(HttpURLConnection) checkurl.openConnection(); - checkConn = conn.getResponseCode(); - conn.disconnect(); - } catch (Exception e) { - System.out.println("ERROR in URL " + e.getMessage()); - } - return checkConn; - } - - public static void main(String[] args) { - - String url = "http://geoserver-dev.d4science-ii.research-infrastructures.eu/geoserver/rest/layergroups/group4402c0cff-27e3-4606-a2f1-993ad37c3dfb.json"; - int d = checkUrl(url, "admin", "gcube@geo2010"); - System.out.println(d); - } -} \ No newline at end of file diff --git a/src/org/gcube/contentmanagement/graphtools/utils/MathFunctions.java b/src/org/gcube/contentmanagement/graphtools/utils/MathFunctions.java deleted file mode 100644 index 27b23c6..0000000 --- a/src/org/gcube/contentmanagement/graphtools/utils/MathFunctions.java +++ /dev/null @@ -1,253 +0,0 @@ -package org.gcube.contentmanagement.graphtools.utils; - -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.List; - -import org.gcube.portlets.user.timeseries.charts.support.types.Point; - -public class MathFunctions { - - /** - * @param args - */ - public static void main(String[] args) { - /* - double[] a = logSubdivision(1,4874,5); - for (int i =0;i generateRandoms(int numberOfRandoms, int min, int max) { - - ArrayList randomsSet = new ArrayList(); - // if number of randoms is equal to -1 generate all numbers - if (numberOfRandoms == -1) { - for (int i = min; i < max; i++) { - randomsSet.add(i); - } - } else { - int numofrandstogenerate = 0; - if (numberOfRandoms <= max) { - numofrandstogenerate = numberOfRandoms; - } else { - numofrandstogenerate = max; - } - - if (numofrandstogenerate == 0) { - randomsSet.add(0); - } else { - for (int i = 0; i < numofrandstogenerate; i++) { - - int RNum = -1; - RNum = (int) ((max) * Math.random()) + min; - - // generate random number - while (randomsSet.contains(RNum)) { - RNum = (int) ((max) * Math.random()) + min; - // AnalysisLogger.getLogger().debug("generated " + RNum); - } - - 
// AnalysisLogger.getLogger().debug("generated " + RNum); - - if (RNum >= 0) - randomsSet.add(RNum); - } - - } - } - - return randomsSet; - } - - public static int[] generateSequence(int elements) { - int[] sequence = new int[elements]; - for (int i = 0; i < elements; i++) { - sequence[i] = i; - } - return sequence; - } - - public static BigInteger chunk2Index(int chunkIndex, int chunkSize) { - - return BigInteger.valueOf(chunkIndex).multiply(BigInteger.valueOf(chunkSize)); - - } - - // calculates mean - public static double mean(double[] p) { - double sum = 0; // sum of all the elements - for (int i = 0; i < p.length; i++) { - sum += p[i]; - } - return sum / p.length; - }// end method mean - - //calculates normalized derivative - public static double[] derivative(double[] a) { - double[] d = new double[a.length]; - double max = 1; - if (a.length > 0) { - for (int i = 0; i < a.length; i++) { - double current = a[i]; - double previous = current; - if (i > 0) { - previous = a[i - 1]; - } - d[i] = current - previous; - if (Math.abs(d[i])>max) - max = Math.abs(d[i]); - // System.out.println("point "+a[i]+" derivative "+d[i]); - } - - //normalize - for (int i = 0; i < a.length; i++) { - d[i] = d[i]/max; - } - } - - return d; - } - - // returns a list of spikes indexes - public static boolean[] findSpikes(double[] derivative,double threshold) { - boolean[] d = new boolean[derivative.length]; - - if (d.length > 0) { - d[0] = false; - for (int i = 1; i < derivative.length - 1; i++) { - if (derivative[i] / derivative[i + 1] < 0){ -// double ratio = Math.abs((double) derivative[i]/ (double) derivative[i+1]); -// System.out.println("RATIO "+i+" "+Math.abs(derivative[i])); -// if ((threshold>0)&&(ratio0)&&(Math.abs(derivative[i])>threshold)) - d[i] = true; - } - else - d[i] = false; - } - d[derivative.length - 1] = false; - } - - return d; - } - - // returns a list of spikes indexes - public static boolean[] findSpikes(double[] derivative) { - return findSpikes(derivative,-1); 
- } - - // transforms a list of points for a series in a double vector of y values - // it applies ONLY to transposed graphs not to extracted list of points (see GraphSamplesTable) - public static double[] points2Double(List> pointslist, int seriesIndex, int numbOfPoints) { - - double[] points = new double[numbOfPoints]; - // System.out.print("points: "); - for (int y = 0; y < numbOfPoints; y++) { - double value = pointslist.get(seriesIndex).getEntries().get(y).getValue().doubleValue(); - points[y] = value; - // System.out.print(value+" "); - } - - return points; - } - - // searches for an index into an array - public static boolean isIn(List indexarray, int index) { - - int size = indexarray.size(); - - for (int i = 0; i < size; i++) { - if (index == indexarray.get(i).intValue()) - return true; - } - - return false; - } - - - // finds the indexes of zero points - public static List findZeros(double[] points) { - - int size = points.length; - ArrayList zeros = new ArrayList(); - - for (int i = 0; i < size; i++) { - if (points[i]==0){ - int start = i; - int end = i; - - for (int j=i+1;j0){ - - double difference = logEnd-logStart; - step = (difference/(double)numberOfParts); - - } -// double [] points = new double[numberOfParts+1]; - double[] linearpoints = new double[numberOfParts+1]; - - for (int i=0;i elementlist) { - - // 0 = String 1 = Boolean 2 = Decimal - int[] scores = new int[3]; - String[] types = { String.class.getName(), Boolean.class.getName(), BigDecimal.class.getName() }; - for (String element : elementlist) { - Object guessedObj = guessType(element); - if (guessedObj instanceof String) { - scores[0] = scores[0] + 1; - } else if (guessedObj instanceof Boolean) { - scores[1] = scores[1] + 1; - } else if (guessedObj instanceof BigDecimal) { - scores[2] = scores[2] + 1; - } - - } - int max = -1; - int maxindex = -1; - for (int i = 0; i < scores.length; i++) { - if (scores[i] > max) { - max = scores[i]; - maxindex = i; - } - } - -// 
System.out.println("index " + maxindex + " max " + max); - - String type = types[maxindex]; - - return type; - } - - public static void main(String[] args) throws ClassNotFoundException { - - ArrayList prova = new ArrayList(); - for (int i = 0; i < 5; i++) { - prova.add("1234"); - } - - String classtype = guessType(prova); - System.out.println(classtype); - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/core/Engine.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/core/Engine.java deleted file mode 100644 index e215c00..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/core/Engine.java +++ /dev/null @@ -1,350 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.core; - - -import java.util.ArrayList; -import java.util.HashMap; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.CategoryOrderedList; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.CategoryScores; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks.ChunkSet; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks.ReferenceChunk; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks.ReferenceChunkSet; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks.SetOfReferenceChunkSet; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks.SingletonChunkSet; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks.TimeSeriesChunk; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks.TimeSeriesChunkSet; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory; -import 
org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions; -import org.hibernate.SessionFactory; - -public class Engine { - - private String ConfigurationFileNameLocal = "hibernate.cfg.xml"; - private SessionFactory referenceDBSession; - - public ArrayList bestCategories; - public ArrayList bestScores; - public ArrayList bestColumns; - public HashMap scoresTable; - public String columnFilter; - private LexicalEngineConfiguration config; - private TimeSeriesChunk singletonChunk; - - public ArrayList getSingletonMatches(){ - return singletonChunk.getDetailedResults(); - } - - public String getSingletonElement(){ - return singletonChunk.getSingletonEntry(); - } - - public SessionFactory getDBSession() throws Exception { - - if (referenceDBSession == null) { - referenceDBSession = DatabaseFactory.initDBConnection(ConfigurationFileNameLocal); - } - - return referenceDBSession; - } - - public SessionFactory getDBSession(LexicalEngineConfiguration externalConf) throws Exception { - - if (referenceDBSession == null) { - referenceDBSession = DatabaseFactory.initDBConnection(ConfigurationFileNameLocal, externalConf); - } - - return referenceDBSession; - } - - public void resetEngine(LexicalEngineConfiguration Config,String ColumnFilter,String configPath){ - config = Config; - scoresTable = new HashMap(); - bestCategories = new ArrayList(); - bestColumns = new ArrayList(); - bestScores = new ArrayList(); - columnFilter = ColumnFilter; -// ConfigurationFileNameLocal = configPath+"/"+ConfigurationFileNameLocal; - } - - public Engine(LexicalEngineConfiguration Config,String ColumnFilter,String configPath) { - config = Config; - scoresTable = new HashMap(); - bestCategories = new ArrayList(); - bestColumns = new ArrayList(); - bestScores = new ArrayList(); - columnFilter = ColumnFilter; - ConfigurationFileNameLocal = configPath+"/"+ConfigurationFileNameLocal; - } - - public void calcLike(CategoryOrderedList col, String unknownSeriesName, String unknownSeriesColumn) { - 
scoresTable = col.getScoresTable(); - - // take a time series set of chunks - TimeSeriesChunkSet tsChunkSet = null; - try { - tsChunkSet = new TimeSeriesChunkSet(config.TimeSeriesChunksToTake, config.chunkSize, unknownSeriesName, unknownSeriesColumn,config, this); - } catch (Exception e) { - e.printStackTrace(); - AnalysisLogger.getLogger().error("Engine->calcLike-> ERROR could not retrieve time series chunks " + e.getLocalizedMessage()); - } - // if we took the ts chunk set correctly perform calculation - if (tsChunkSet != null) { - - // generate the set of reference chunks - SetOfReferenceChunkSet setRefChunksSet = new SetOfReferenceChunkSet(col.getOrderedList(),config, this); - - TimeSeriesChunk tsChunk = tsChunkSet.nextChunk(); - // for all ts chunks - while (tsChunk != null) { - - // take a set of chunks from a reference category - ReferenceChunkSet refChunkSet = setRefChunksSet.getNextChunkSet(); - while (refChunkSet != null) { - // take a chunk in the reference chunk set - ReferenceChunk refChunk = refChunkSet.nextChunk(); - while (refChunk != null) { - - try { - tsChunk.compareToReferenceChunk(scoresTable, refChunk); - } catch (Exception e) { - e.printStackTrace(); - AnalysisLogger.getLogger().error("Engine->calcLike-> ERROR could not compare time series chunk with reference chunk " + e.getLocalizedMessage()); - } - // take another chunk in the reference chunk set - refChunk = refChunkSet.nextChunk(); - } - - // check score - UpdateScores(refChunkSet.getSeriesName(),false); - - // take another set of chunks from another reference category - refChunkSet = setRefChunksSet.getNextChunkSet(); - } - - tsChunk = tsChunkSet.nextChunk(); - } - - } - - } - - boolean threadActivity[]; - - private void wait4Thread(int index){ - - - // wait until thread is free - while (threadActivity[index]) { - try { - Thread.sleep(10); - } catch (InterruptedException e) { - } - } - - - } - - private void startNewTCalc(TimeSeriesChunk tsChunk, ReferenceChunkSet refChunkSet,int 
index){ - - threadActivity[index] = true; - ThreadCalculator tc = new ThreadCalculator(tsChunk, refChunkSet,index); - Thread t = new Thread(tc); - t.start(); -// AnalysisLogger.getLogger().info("ThreadCalculator<-go "+index); - } - - - public void calcLikeThread(CategoryOrderedList col, String unknownSeriesName, String unknownSeriesColumn,String singletonString) { - scoresTable = col.getScoresTable(); - - // take a time series set of chunks - ChunkSet tsChunkSet = null; - int[] currentThreads = MathFunctions.generateSequence(config.numberOfThreadsToUse); - int currentThread = 0; - threadActivity = new boolean [currentThreads.length]; - //initialize to false; - for (int j=0;jcalcLike-> ERROR could not retrieve time series chunks " + e.getLocalizedMessage()); - } - // if we took the ts chunk set correctly perform calculation - if (tsChunkSet != null) { - - // generate the set of reference chunks - SetOfReferenceChunkSet setRefChunksSet = new SetOfReferenceChunkSet(col.getOrderedList(),config, this); - - TimeSeriesChunk tsChunk = (TimeSeriesChunk)tsChunkSet.nextChunk(); - - AnalysisLogger.getLogger().debug("tsChunk is null "+(tsChunk != null)); - // for all ts chunks - while (tsChunk != null) { - - // take a set of chunks from a reference category - ReferenceChunkSet refChunkSet = setRefChunksSet.getNextChunkSet(); - while (refChunkSet != null) { - wait4Thread(currentThreads[currentThread]); - startNewTCalc(tsChunk, refChunkSet,currentThreads[currentThread]); - -// makeComparisonsTSChunk2RefChunks(tsChunk, refChunkSet); - - // take another set of chunks from another reference category - refChunkSet = setRefChunksSet.getNextChunkSet(); - - currentThread++; - if (currentThread >= currentThreads.length) - currentThread = 0; - } - - - //if the chunk is a singleton, don't process other and record the result - if (tsChunk.isSingleton()){ - singletonChunk = tsChunk; - - break; - } - - tsChunk = (TimeSeriesChunk)tsChunkSet.nextChunk(); - } - - //wait for last threads to 
finish - for (int i : currentThreads) { - // free previous calculation - wait4Thread(i); - } - - } - - } - - private void makeComparisonsTSChunk2RefChunks(TimeSeriesChunk tsChunk, ReferenceChunkSet refChunkSet) { - - // take a chunk in the reference chunk set - ReferenceChunk refChunk = refChunkSet.nextChunk(); - while (refChunk != null) { - - try { - tsChunk.compareToReferenceChunk(scoresTable, refChunk,columnFilter); - } catch (Exception e) { - e.printStackTrace(); - AnalysisLogger.getLogger().error("Engine->calcLike-> ERROR could not compare time series chunk with reference chunk " + e.getLocalizedMessage()); - } - - //if the TimeSeries chunk states the processing must be interrupted, don't perform other comparisons - if (tsChunk.mustInterruptProcess()) - break; - - // take another chunk in the reference chunk set - refChunk = refChunkSet.nextChunk(); - - } - // check score - UpdateScores(refChunkSet.getSeriesName(),tsChunk.isSingleton()); - } - - private void UpdateScores(String categoryName, boolean singletonMatch) { - - CategoryScores categoryScore = scoresTable.get(categoryName); - ArrayList bestCols = categoryScore.findBestList(); - String bestColumn = null; - double score = 0; - if (bestCols.size() > 0) { - bestColumn = bestCols.get(0); - score = categoryScore.getScore(bestColumn,singletonMatch); - } - - AnalysisLogger.getLogger().trace("Engine->UpdateScores-> \tBEST SUITABLE COLUMN IS: " + bestColumn); - AnalysisLogger.getLogger().trace("Engine->UpdateScores-> \tBEST SCORE IS: " + score); - - // order this column - if (score > config.categoryDiscardThreshold) { - - int index = 0; - // insert at the right point in the classification - for (Double dscore : bestScores) { - if (dscore.doubleValue() < score) { - - break; - } - index++; - } - bestCategories.add(index, categoryName); - bestScores.add(index, score); - bestColumns.add(index, bestColumn); - checkAndAddColumns(categoryScore, bestCols, categoryName,singletonMatch); - } - - } - - private void 
checkAndAddColumns(CategoryScores scores, ArrayList bestCols, String categoryName,boolean singletonMatch) { - - int size = bestCols.size(); - double bestScore = scores.getScore(bestCols.get(0),singletonMatch); - - for (int i = 1; i < size; i++) { - // take the i-th column - String column = bestCols.get(i); - if (column != null) { - // check the score - double score = scores.getScore(column,singletonMatch); - - // if the score is near the best, add the column - if ((score > 0) && (score >= (bestScore - 0.5 * bestScore))) { - - int index = 0; - // insert at the right point in the classification - for (Double dscore : bestScores) { - if (dscore.doubleValue() < score) { - - break; - } - index++; - } - - // AnalysisLogger.getLogger().info("chechAndAddColumns -> column to add "+column+" category "+categoryName+" with value "+score+" previous "+(bestScore - 0.5 * bestScore)); - bestColumns.add(index,column); - bestScores.add(index,score); - bestCategories.add(index,categoryName); - // AnalysisLogger.getLogger().info("chechAndAddColumns -> "+bestCategories); - } - } - } - - } - - private class ThreadCalculator implements Runnable { - TimeSeriesChunk tsChunk; - ReferenceChunkSet refChunksSet; - int index; - - public ThreadCalculator(TimeSeriesChunk tsChunk, ReferenceChunkSet refChunksSet,int index) { - this.tsChunk = tsChunk; - this.refChunksSet = refChunksSet; - this.index = index; - } - - public void run() { -// AnalysisLogger.getLogger().info("ThreadCalculator->started "+index); - makeComparisonsTSChunk2RefChunks(tsChunk, refChunksSet); - threadActivity[index]=false; -// AnalysisLogger.getLogger().info("ThreadCalculator>-finished "+index); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/core/LexicalEngineConfiguration.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/core/LexicalEngineConfiguration.java deleted file mode 100644 index ae9e418..0000000 --- 
a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/core/LexicalEngineConfiguration.java +++ /dev/null @@ -1,322 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.core; - -import java.io.FileInputStream; -import java.util.Properties; - - -public class LexicalEngineConfiguration { - - public void configure(String absoluteFilePath) throws Exception { - Properties props = new Properties(); - FileInputStream fis = new FileInputStream(absoluteFilePath); - props.load(fis); - categoryDiscardThreshold = Float.parseFloat(props.getProperty("categoryDiscardThreshold")); - entryAcceptanceThreshold = Integer.parseInt(props.getProperty("entryAcceptanceThreshold")); - chunkSize = Integer.parseInt(props.getProperty("chunkSize")); - TimeSeriesChunksToTake = Integer.parseInt(props.getProperty("timeSeriesChunksToTake")); - ReferenceChunksToTake = Integer.parseInt(props.getProperty("referenceChunksToTake")); - randomTake = Boolean.parseBoolean(props.getProperty("randomTake")); - useSimpleDistance = Boolean.parseBoolean(props.getProperty("useSimpleDistance")); - numberOfThreadsToUse = Integer.parseInt(props.getProperty("numberOfThreadsToUse")); - categoryDiscardDifferencialThreshold = Float.parseFloat(props.getProperty("categoryDiscardDifferencialThreshold")); - singleEntryRecognitionMaxDeviation = Float.parseFloat(props.getProperty("singleEntryRecognitionMaxDeviation")); - fis.close(); - } - - - - public void setCategoryDiscardThreshold(float categoryDiscardThreshold) { - this.categoryDiscardThreshold = categoryDiscardThreshold; - } - - public float getCategoryDiscardThreshold() { - return categoryDiscardThreshold; - } - - public void setEntryAcceptanceThreshold(float entryAcceptanceThreshold) { - this.entryAcceptanceThreshold = entryAcceptanceThreshold; - } - - public float getEntryAcceptanceThreshold() { - return entryAcceptanceThreshold; - } - - - - public void setCategoryDiscardDifferencialThreshold(float categoryDiscardDifferencialThreshold) { - 
this.categoryDiscardDifferencialThreshold = categoryDiscardDifferencialThreshold; - } - - public float getCategoryDiscardDifferencialThreshold() { - return categoryDiscardDifferencialThreshold; - } - - public void setChunkSize(int chunkSize) { - this.chunkSize = chunkSize; - } - - public int getChunkSize() { - return chunkSize; - } - - public void setRandomTake(boolean randomTake) { - this.randomTake = randomTake; - } - - public boolean isRandomTake() { - return randomTake; - } - - public void setTimeSeriesChunksToTake(int timeSeriesChunksToTake) { - TimeSeriesChunksToTake = timeSeriesChunksToTake; - } - - public int getTimeSeriesChunksToTake() { - return TimeSeriesChunksToTake; - } - - public void setReferenceChunksToTake(int referenceChunksToTake) { - ReferenceChunksToTake = referenceChunksToTake; - } - - public int getReferenceChunksToTake() { - return ReferenceChunksToTake; - } - - public void setUseSimpleDistance(boolean useSimpleDistance) { - this.useSimpleDistance = useSimpleDistance; - } - - public boolean isUseSimpleDistance() { - return useSimpleDistance; - } - - - public void setNumberOfThreadsToUse(int numberOfThreadsToUse) { - this.numberOfThreadsToUse = numberOfThreadsToUse; - } - - public int getNumberOfThreadsToUse() { - return numberOfThreadsToUse; - } - - public void setSingleEntryRecognitionMaxDeviation(float singleEntryRecognitionMaxDeviation) { - this.singleEntryRecognitionMaxDeviation = singleEntryRecognitionMaxDeviation; - } - - public float getSingleEntryRecognitionMaxDeviation() { - return singleEntryRecognitionMaxDeviation; - } - - public float categoryDiscardThreshold = -Float.MIN_VALUE; - public float entryAcceptanceThreshold = -Float.MIN_VALUE; - public float categoryDiscardDifferencialThreshold = -Float.MIN_VALUE; - public float singleEntryRecognitionMaxDeviation = -Float.MIN_VALUE; - public int chunkSize = -Integer.MIN_VALUE; - public Boolean randomTake = null; - // if set to -1 all chunks will be analyzed - public int 
TimeSeriesChunksToTake = -Integer.MIN_VALUE; - public int ReferenceChunksToTake = -Integer.MIN_VALUE; - public Boolean useSimpleDistance = null; - public int numberOfThreadsToUse = -Integer.MIN_VALUE; - - //database parameters - public String databaseDriver = null; - public String databaseURL = null; - public String databaseUserName = null; - public String databasePassword = null; - public String databaseDialect = null; - public String databaseIdleConnectionTestPeriod = null; - public String databaseAutomaticTestTable = null; - - //reference data parameters - public String referenceTable = null; - public String referenceColumn = null; - public String idColumn= null; - public String nameHuman = null; - public String description = null; - - public void mergeConfig(LexicalEngineConfiguration config){ - - if (config.getCategoryDiscardDifferencialThreshold()!=-Float.MIN_VALUE) - setCategoryDiscardDifferencialThreshold(config.getCategoryDiscardDifferencialThreshold()); - if (config.getSingleEntryRecognitionMaxDeviation()!=-Float.MIN_VALUE) - setSingleEntryRecognitionMaxDeviation(config.getSingleEntryRecognitionMaxDeviation()); - if (config.getCategoryDiscardThreshold()!=-Float.MIN_VALUE) - setCategoryDiscardThreshold(config.getCategoryDiscardThreshold()); - if (config.getChunkSize()!=-Integer.MIN_VALUE) - setChunkSize(config.getChunkSize()); - if (config.getEntryAcceptanceThreshold()!=-Float.MIN_VALUE) - setEntryAcceptanceThreshold(config.getEntryAcceptanceThreshold()); - if (config.getNumberOfThreadsToUse()!=-Integer.MIN_VALUE) - setNumberOfThreadsToUse(config.getNumberOfThreadsToUse()); - if (config.getReferenceChunksToTake()!=-Integer.MIN_VALUE) - setReferenceChunksToTake(config.getReferenceChunksToTake()); - if (config.getTimeSeriesChunksToTake()!=-Integer.MIN_VALUE) - setTimeSeriesChunksToTake(config.getTimeSeriesChunksToTake()); - if (config.randomTake!= null) - setRandomTake(config.isRandomTake()); - if (config.useSimpleDistance!=null) - 
setUseSimpleDistance(config.isUseSimpleDistance()); - //database information merge - if (config.databaseDriver!=null) - setDatabaseDriver(config.databaseDriver); - if (config.databaseDialect!=null) - setDatabaseDialect(config.databaseDialect); - if (config.databaseAutomaticTestTable!=null) - setDatabaseAutomaticTestTable(config.databaseAutomaticTestTable); - if (config.databaseIdleConnectionTestPeriod!=null) - setDatabaseIdleConnectionTestPeriod(config.databaseIdleConnectionTestPeriod); - if (config.databaseUserName!=null) - setDatabaseUserName(config.databaseUserName); - if (config.databasePassword!=null) - setDatabasePassword(config.databasePassword); - if (config.databaseURL!=null) - setDatabaseURL(config.databaseURL); - if (config.referenceTable!=null) - setReferenceTable(config.referenceTable); - if (config.referenceColumn!=null) - setReferenceColumn(config.referenceColumn); - if (config.idColumn!=null) - setIdColumn(config.idColumn); - if (config.nameHuman!=null) - setNameHuman(config.nameHuman); - if (config.description!=null) - setDescription(config.description); - } - - - - public void setDatabaseDriver(String databaseDriver) { - this.databaseDriver = databaseDriver; - } - - - - public String getDatabaseDriver() { - return databaseDriver; - } - - - - public void setDatabaseURL(String databaseURL) { - this.databaseURL = databaseURL; - } - - - - public String getDatabaseURL() { - return databaseURL; - } - - - - public void setDatabaseUserName(String databaseUserName) { - this.databaseUserName = databaseUserName; - } - - - - public String getDatabaseUserName() { - return databaseUserName; - } - - - - public void setDatabasePassword(String databasePassword) { - this.databasePassword = databasePassword; - } - - - - public String getDatabasePassword() { - return databasePassword; - } - - - - public void setDatabaseDialect(String databaseDialect) { - this.databaseDialect = databaseDialect; - } - - - - public String getDatabaseDialect() { - return databaseDialect; 
- } - - - - public void setDatabaseIdleConnectionTestPeriod(String databaseIdleConnectionTestPeriod) { - this.databaseIdleConnectionTestPeriod = databaseIdleConnectionTestPeriod; - } - - - - public String getDatabaseIdleConnectionTestPeriod() { - return databaseIdleConnectionTestPeriod; - } - - - - public void setDatabaseAutomaticTestTable(String databaseAutomaticTestTable) { - this.databaseAutomaticTestTable = databaseAutomaticTestTable; - } - - - - public String getDatabaseAutomaticTestTable() { - return databaseAutomaticTestTable; - } - - public String getReferenceTable() { - return referenceTable; - } - - public void setReferenceTable(String referenceTable) { - this.referenceTable = referenceTable; - } - - public String getReferenceColumn() { - return referenceColumn; - } - - public void setReferenceColumn(String referenceColumn) { - this.referenceColumn = referenceColumn; - } - - public String getIdColumn() { - return idColumn; - } - - - - public void setIdColumn(String idColumn) { - this.idColumn = idColumn; - } - - - - public String getNameHuman() { - return nameHuman; - } - - - - public void setNameHuman(String nameHuman) { - this.nameHuman = nameHuman; - } - - - - public String getDescription() { - return description; - } - - - - public void setDescription(String description) { - this.description = description; - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example1_Species.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example1_Species.java deleted file mode 100644 index 0e8a6e8..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example1_Species.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.examples; - - -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class Example1_Species { - - public static void 
main(String[] args) { - - try { - int attempts = 1; - - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6"; - String column = "field1"; - String correctFamily = "SPECIES"; - String correctColumn = "SCIENTIFIC_NAME"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example2_Area.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example2_Area.java deleted file mode 100644 index 86e6251..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example2_Area.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.examples; - - -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class Example2_Area { - - public static void main(String[] args) { - - try { - int attempts = 1; - - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6"; - String column = "field3"; - String correctFamily = "AREA"; - String correctColumn = "NAME_EN"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - 
e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example3_SingleMatchShark.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example3_SingleMatchShark.java deleted file mode 100644 index 2bd8a3b..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example3_SingleMatchShark.java +++ /dev/null @@ -1,48 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.examples; - -import java.util.ArrayList; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - - -public class Example3_SingleMatchShark { - - public static void main(String[] args) { - - try { - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String singleton = "shark"; - String family = "species"; - String column = "name_en"; - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - - //CHANGE THIS TO ENHANCE THE RECALL - conf.setEntryAcceptanceThreshold(30); - conf.setReferenceChunksToTake(-1); - conf.setTimeSeriesChunksToTake(-1); - conf.setUseSimpleDistance(false); - - guesser.runGuesser(configPath, singleton, conf, family,column ); - ArrayList detailedResults = guesser.getDetailedMatches(); - - AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton); - - CategoryGuesser.showResults(detailedResults); - - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git 
a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example4_SingleMatchMitella.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example4_SingleMatchMitella.java deleted file mode 100644 index 68f3ce6..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example4_SingleMatchMitella.java +++ /dev/null @@ -1,49 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.examples; - -import java.util.ArrayList; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - - -public class Example4_SingleMatchMitella { - - public static void main(String[] args) { - - try { - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String singleton = "Mitella pollicipes"; -// String singleton = "policipes"; - String family = "species"; - String column = "scientific_name"; - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - - //CHANGE THIS TO ENHANCE THE RECALL - conf.setEntryAcceptanceThreshold(30); - conf.setReferenceChunksToTake(-1); - conf.setTimeSeriesChunksToTake(-1); - conf.setUseSimpleDistance(false); - - guesser.runGuesser(configPath, singleton, conf, family,column ); - ArrayList detailedResults = guesser.getDetailedMatches(); - - AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton); - - CategoryGuesser.showResults(detailedResults); - - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git 
a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example5_SingleMatchMitella.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example5_SingleMatchMitella.java deleted file mode 100644 index e1bb6b7..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/Example5_SingleMatchMitella.java +++ /dev/null @@ -1,48 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.examples; - -import java.util.ArrayList; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - - -public class Example5_SingleMatchMitella { - - public static void main(String[] args) { - - try { - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String singleton = "Mirella policepes"; - String family = "species"; - String column = "scientific_name"; - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - - //CHANGE THIS TO ENHANCE THE RECALL - conf.setEntryAcceptanceThreshold(30); - conf.setReferenceChunksToTake(-1); - conf.setTimeSeriesChunksToTake(-1); - conf.setUseSimpleDistance(false); - - guesser.runGuesser(configPath, singleton, conf, family,column ); - ArrayList detailedResults = guesser.getDetailedMatches(); - - AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton); - - CategoryGuesser.showResults(detailedResults); - - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git 
a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/ExampleGuessingExternalCfg.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/ExampleGuessingExternalCfg.java deleted file mode 100644 index d652383..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/examples/ExampleGuessingExternalCfg.java +++ /dev/null @@ -1,64 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.examples; - -import java.util.ArrayList; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; - -public class ExampleGuessingExternalCfg { - - public static void main(String[] args) { - - try { - - String configPath = "./"; - CategoryGuesser guesser = new CategoryGuesser(configPath); - - - //bench 1 - System.out.println("----------------------BENCH 1-------------------------"); - String seriesName = "import_532bba80_1c8f_11df_a4ee_87804054691e"; - String column = "field2"; - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - conf.setCategoryDiscardDifferencialThreshold(10); - conf.setCategoryDiscardThreshold(0); - conf.setChunkSize(25); - conf.setEntryAcceptanceThreshold(50); - conf.setNumberOfThreadsToUse(2); - conf.setRandomTake(true); - conf.setReferenceChunksToTake(20); - conf.setTimeSeriesChunksToTake(1); - conf.setUseSimpleDistance(false); - - //database Parameters - conf.setDatabaseUserName("root"); - conf.setDatabasePassword("ash_ash80"); - conf.setDatabaseDriver("com.mysql.jdbc.Driver"); - conf.setDatabaseURL("jdbc:mysql://localhost/timeseries"); - conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect"); - conf.setDatabaseAutomaticTestTable("connectiontesttable"); - conf.setDatabaseIdleConnectionTestPeriod("3600"); - - //reference parameters - conf.setReferenceTable("reference_table"); - 
conf.setReferenceColumn("table_name"); - conf.setIdColumn("id"); - conf.setNameHuman("name_human"); - conf.setDescription("description"); - - guesser.init(conf); - - guesser.runGuesser(seriesName, column, conf); - ArrayList results = guesser.getClassification(); - CategoryGuesser.showResults(results); - - System.out.println("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/Category.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/Category.java deleted file mode 100644 index 4981482..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/Category.java +++ /dev/null @@ -1,71 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data; - - -import java.math.BigInteger; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.interfaces.Reference; - -public class Category implements Reference { - - - public static void main(String[] args) { - // TODO Auto-generated method stub - - } - private String categoryName; - private String categoryIndex; - private String tableName; - private String description; - private BigInteger numberOfElements; - - public Category(String name,String index,String tablename,String descr){ - categoryName=name; - categoryIndex=index; - tableName=tablename; - description=descr; - } - - public void setName(String categoryName) { - this.categoryName = categoryName; - } - - public String getName() { - return categoryName; - } - - public void setIndex(String categoryIndex) { - this.categoryIndex = categoryIndex; - } - - public String getIndex() { - return categoryIndex; - } - - public void setTableName(String tableName) { - this.tableName = tableName; - } - - public String getTableName() { - return tableName; - } - - public void setDescription(String description) { - this.description = description; - } - 
- public String getDescription() { - return description; - } - - public String toString(){ - return "["+categoryName+": index "+categoryIndex+" table "+tableName+" description "+description+"]"; - } - - public void setNumberOfElements(BigInteger numberOfElements) { - this.numberOfElements = numberOfElements; - } - - public BigInteger getNumberOfElements() { - return numberOfElements; - } -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/CategoryOrderedList.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/CategoryOrderedList.java deleted file mode 100644 index 826c5ae..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/CategoryOrderedList.java +++ /dev/null @@ -1,79 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data; - - -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.HashMap; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.interfaces.Reference; - -public class CategoryOrderedList { - - // lista ordinata in ordine decrescente - ArrayList orderedList; - HashMap orderedListTable; - private HashMap scoresTable; - - - public void setOrderedList(ArrayList OrderedList){ - orderedList = OrderedList; - } - public HashMap getScoresTable() { - return scoresTable; - } - - public void setCategoryTable( HashMap OrderedListTable ) { - orderedListTable = OrderedListTable ; - } - - public Reference getCategory ( String categoryName ) { - return orderedListTable.get(categoryName); - } - - public ArrayList getOrderedList() { - return orderedList; - } - - LexicalEngineConfiguration config; - - public CategoryOrderedList(LexicalEngineConfiguration Config) { - orderedList = new ArrayList(); - scoresTable = new HashMap(); - config = Config; - orderedListTable = new HashMap(); - } - - public void addCategory(Category c) { - 
- BigInteger nElements = c.getNumberOfElements(); - int index = 0; - - for (Reference cc : orderedList) { - BigInteger localnum = cc.getNumberOfElements(); - if (localnum.compareTo(nElements) < 0) { - break; - } - index++; - } - orderedList.add(index, c); - scoresTable.put(c.getName(), new CategoryScores(c.getNumberOfElements(),config)); - orderedListTable.put(c.getName(), c); -// scoresTable.put(c.getName(), new CategoryScores()); - } - - public CategoryOrderedList generateNovelList(){ - CategoryOrderedList newCatList = new CategoryOrderedList(config); - newCatList.setOrderedList(orderedList); - newCatList.setCategoryTable(orderedListTable); - - for (String key:scoresTable.keySet()){ - CategoryScores ct = scoresTable.get(key); - CategoryScores ctnew = new CategoryScores(ct.getCategoryElements(), config); - newCatList.getScoresTable().put(key,ctnew); - } - - return newCatList; - } - -} \ No newline at end of file diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/CategoryScores.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/CategoryScores.java deleted file mode 100644 index 285b790..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/CategoryScores.java +++ /dev/null @@ -1,205 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data; - - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.HashMap; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions; - -//score relative to a certain category and column - -public class CategoryScores { - - // column names vs percentage - private HashMap columnsScore; - - private int matchedElements; - private BigInteger maxElements; - private BigInteger categoryElements; - private 
LexicalEngineConfiguration config; - - public CategoryScores(BigInteger catElements, LexicalEngineConfiguration Config) { - columnsScore = new HashMap(); - matchedElements = 0; - setCategoryElements(catElements); - config = Config; - maxElements = calculateMaxElements(catElements); - } - - public double calculateCoverage(){ - - double bd = new BigDecimal(matchedElements).divide(new BigDecimal(maxElements), 2, BigDecimal.ROUND_FLOOR).doubleValue(); - - //lower poor categories - if (maxElements.compareTo(BigInteger.valueOf(config.chunkSize))<=0) - bd = bd *0.8; - - //To-DO take into observation!!! - //higher very big set coverage - if (categoryElements.compareTo(BigInteger.valueOf(10000))>0) - bd = Math.max(0.01, bd); - - return bd; - } - - private BigInteger calculateMaxElements(BigInteger catElements){ - BigInteger maxElements = BigInteger.ZERO; - - int maxNumberOfChunks = config.ReferenceChunksToTake; - int chunkSize = config.chunkSize; - int numberofcycles=0; - - if (maxNumberOfChunks<0) - return catElements; - try{ - BigDecimal intcycles; - BigDecimal oddcycles; - BigDecimal catElementsDecimal = new BigDecimal(catElements); - BigDecimal[] arraydecimal = catElementsDecimal.divideAndRemainder(new BigDecimal(BigInteger.valueOf(chunkSize))); - intcycles = arraydecimal[0]; - oddcycles = arraydecimal[1]; - numberofcycles = intcycles.intValue(); - if ((numberofcycles==0)&&(oddcycles.intValue() > 0)) { - numberofcycles = numberofcycles + 1; - maxElements = oddcycles.toBigInteger(); - } - else{ - if (numberofcycles>maxNumberOfChunks) - numberofcycles = maxNumberOfChunks; - - maxElements = BigInteger.valueOf(chunkSize).multiply(BigInteger.valueOf(numberofcycles)); - } - - }catch(Exception e){} - - return maxElements; - } - - - public String showScores(){ - return columnsScore.toString()+":"+calculateCoverage(); //+" - "+matchedElements+" vs "+maxElements; - } - - public void incrementScore(String columnName,float increment,boolean doIncrementMathes) { - - Float score = 
columnsScore.get(columnName); - - if (score==null) - score =new Float(0); - - score = MathFunctions.incrementPerc(score, increment, matchedElements); - - if (doIncrementMathes) - matchedElements ++; - - columnsScore.put(columnName, score); - } - - - public float getScore(String columnName,boolean simpleMatch) { - - if (simpleMatch){ - return getSimpleScore(columnName); - } - else - return getScore(columnName); - } - - - public float getScore(String columnName) { - - Float score = null; - try { -// score = columnsScore.get(columnName)*(float)calculateCoverage(); - score = columnsScore.get(columnName); - if (score!=null){ - return score*(float)calculateCoverage(); - } - } catch (Exception e) { - } - return score; - - } - - public float getSimpleScore(String columnName) { - - Float score = null; - try { -// score = columnsScore.get(columnName)*(float)calculateCoverage(); - score = columnsScore.get(columnName); - if (score!=null){ - return score; - } - } catch (Exception e) { - } - return score; - - } - - // take the best performing column - public String findBest() { - - String bestCol = null; - Float bestscore = Float.valueOf(-1); - - for (String column : columnsScore.keySet()) { - - Float score = new Float(0); - try { - score = columnsScore.get(column); - } catch (Exception e) { - AnalysisLogger.getLogger().error("ERROR in getting SCORE " + e.getLocalizedMessage()); - } - if (bestscore.compareTo(score) < 0) { - bestscore = score; - bestCol = column; - } - } - - return bestCol; - } - - // take the best performing columns - public ArrayList findBestList() { - - ArrayList bestCols = new ArrayList(); - - for (String column : columnsScore.keySet()) { - - Float score = new Float(0); - - try { - score = columnsScore.get(column); - } catch (Exception e) { - AnalysisLogger.getLogger().error("ERROR in getting SCORE " + e.getLocalizedMessage()); - } - - // find best place where to put column - int size = bestCols.size(); - int index = size; - for (int i = 0; i < size; i++) { - 
if (columnsScore.get(bestCols.get(i)).compareTo(score) <= 0) { - index = i; - break; - } - } - bestCols.add(index, column); - - } - - return bestCols; - } - - public void setCategoryElements(BigInteger categoryElements) { - this.categoryElements = categoryElements; - } - - public BigInteger getCategoryElements() { - return categoryElements; - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/CategoryScoresOld.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/CategoryScoresOld.java deleted file mode 100644 index 4f39044..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/CategoryScoresOld.java +++ /dev/null @@ -1,123 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data; - - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.HashMap; - -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -//score relative to a certain category and column - -public class CategoryScoresOld { - - // column names vs percentage - private HashMap columnsScore; - - private BigDecimal maximumElements; - - public CategoryScoresOld(BigInteger maxelements) { - this.maximumElements = new BigDecimal(maxelements); - columnsScore = new HashMap(); - } - - public void setMaximumElements(BigDecimal MaximumElements) { - maximumElements = MaximumElements; - } - - public void incrementScore(String columnName,float increment) { - - BigDecimal score = columnsScore.get(columnName); - - BigDecimal reciproc = BigDecimal.valueOf(increment); - - if (score == null) { - // build up a new score : 1/TOTAL - score = reciproc; - } else { - score = score.add(reciproc); - } - columnsScore.put(columnName, score); - // AnalysisLogger.getLogger().debug("CategoryOrderedList->checkUnkEntriesOnEntireCategory-> SCORE "+score); - } - - public double getScore(String columnName) { - - double score = 0; - try { - - BigDecimal 
percentage = columnsScore.get(columnName); - try { - if (percentage == null) - percentage = BigDecimal.ZERO; - - AnalysisLogger.getLogger().trace("getScore -> Score for "+columnName+": " + percentage + " vs " + maximumElements); - percentage = percentage.divide(maximumElements, 2, BigDecimal.ROUND_DOWN); - } catch (ArithmeticException e) { - percentage = BigDecimal.ZERO; - e.printStackTrace(); - } - - score = percentage.doubleValue(); - } catch (Exception e) { - } - return score; - - } - - // take the best performing column - public String findBest() { - - String bestCol = null; - BigDecimal bestscore = BigDecimal.valueOf(-1); - - for (String column : columnsScore.keySet()) { - - BigDecimal score = BigDecimal.ZERO; - try { - score = columnsScore.get(column); - } catch (Exception e) { - AnalysisLogger.getLogger().error("ERROR in getting SCORE " + e.getLocalizedMessage()); - } - if (bestscore.compareTo(score) < 0) { - bestscore = score; - bestCol = column; - } - } - - return bestCol; - } - - // take the best performing columns - public ArrayList findBestList() { - - ArrayList bestCols = new ArrayList(); - - for (String column : columnsScore.keySet()) { - - BigDecimal score = BigDecimal.ZERO; - - try { - score = columnsScore.get(column); - } catch (Exception e) { - AnalysisLogger.getLogger().error("ERROR in getting SCORE " + e.getLocalizedMessage()); - } - - // find best place where to put column - int size = bestCols.size(); - int index = size; - for (int i = 0; i < size; i++) { - if (columnsScore.get(bestCols.get(i)).compareTo(score) <= 0) { - index = i; - break; - } - } - bestCols.add(index, column); - - } - - return bestCols; - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/DBObjectTranslator.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/DBObjectTranslator.java deleted file mode 100644 index 7dcbf7c..0000000 --- 
a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/DBObjectTranslator.java +++ /dev/null @@ -1,272 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data; - -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.List; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.DataTypeRecognizer; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory; -import org.hibernate.SessionFactory; - -public class DBObjectTranslator { - - public static void main(String[] args) { - - } - - public ArrayList relations; - public ArrayList categories; - - public BigInteger totalEntries; - public BigInteger totalCatElements; - public BigInteger totalRelationElements; - - public DBObjectTranslator() { - relations = new ArrayList(); - categories = new ArrayList(); - totalCatElements = BigInteger.ZERO; - totalRelationElements = BigInteger.ZERO; - totalEntries = BigInteger.ZERO; - } - - public BigInteger calculateTotalEntries(SessionFactory dbSession, String timeSeriesName, String timeSeriesColumn) { - - BigInteger count = BigInteger.ZERO; - String query = "select count(*) from (SELECT distinct " + timeSeriesColumn + " FROM " + timeSeriesName + ") r;"; - // String query = "SELECT count(*) FROM " + timeSeriesName.toLowerCase(); - - List resultSet = DatabaseFactory.executeSQLQuery(query, dbSession); - - for (Object result : resultSet) { - - try { - BigInteger resultcount = (BigInteger) result; - totalEntries = totalEntries.add(resultcount); - count = resultcount; - AnalysisLogger.getLogger().trace("DBObjectTranslator->calculateTotalEntries: Time Series " + timeSeriesName + " total " + totalEntries); - } catch (Exception e) { - } - } - - return count; - } - - public ArrayList retrieveTimeSeriesEntries(SessionFactory dbSession, String timeSeriesName, String timeSeriesColumn, BigInteger min, int numberOfElements) { - - // 
String query = "SELECT distinct "+timeSeriesColumn+" FROM "+timeSeriesName+" r limit "+min+","+numberOfElements; - String query = "SELECT distinct " + timeSeriesColumn + " FROM " + timeSeriesName + " r limit " + numberOfElements + " offset " + min; - AnalysisLogger.getLogger().trace("DBObjectTranslator->query: " + query); - - List resultSet = DatabaseFactory.executeSQLQuery(query, dbSession); - ArrayList column = new ArrayList(); - - for (Object result : resultSet) { - try { - String value = ""; - if (result != null) - value = result.toString(); - - column.add(value); - - // AnalysisLogger.getLogger().debug("DBObjectTranslator->retrieveColumnRange: Column Element Added " + value); - } catch (Exception e) { - e.printStackTrace(); - AnalysisLogger.getLogger().trace("DBObjectTranslator->retrieveTimeSeriesEntries: Error in adding entry :" + e.getLocalizedMessage()); - } - } - - AnalysisLogger.getLogger().trace("DBObjectTranslator->retrieveColumnRange: Column " + column.toString()); - - return column; - } - - public ArrayList retrieveEntries(SessionFactory dbSession, String timeSeriesName, BigInteger min, int numberOfElements) { - - // clean previous entries - ArrayList currentEntries = new ArrayList(); - - ArrayList descriptions = new ArrayList(); - ArrayList types = new ArrayList(); - /* - * SELECT table_name,ordinal_position,column_name,data_type, is_nullable,character_maximum_length FROM information_schema.COLUMNS WHERE table_name ='ref_area'; - */ - - String queryDesc = "SELECT table_name,ordinal_position,column_name,data_type, is_nullable,character_maximum_length FROM information_schema.COLUMNS WHERE table_name ='" + timeSeriesName.toLowerCase() + "'"; - - List resultSetDesc = DatabaseFactory.executeSQLQuery(queryDesc, dbSession); - for (Object result : resultSetDesc) { - Object[] resultArray = (Object[]) result; - descriptions.add((String) resultArray[2]); - types.add(DataTypeRecognizer.transformTypeFromDB((String) resultArray[3])); - } - - if 
(descriptions.size() > 0) { - // String query = "SELECT DISTINCT * FROM " + timeSeriesName + " r where id>=" + min.toString() + " and id<=" + max.toString(); - // String query = "SELECT DISTINCT * FROM " + timeSeriesName + " r limit "+min+","+numberOfElements; - String query = "SELECT DISTINCT * FROM " + timeSeriesName + " r limit " + numberOfElements + " offset " + min; - AnalysisLogger.getLogger().trace("DBObjectTranslator->retrieveEntries: query " + query); - - List resultSet = DatabaseFactory.executeSQLQuery(query, dbSession); - - for (Object result : resultSet) { - Entry entry = new Entry(); - try { - Object[] resultArray = (Object[]) result; - int i = 0; - for (Object res : resultArray) { - // build entry - String value = ""; - if (res != null) - value = res.toString(); - - entry.addAttribute(descriptions.get(i), value); - entry.addType(descriptions.get(i), types.get(i)); - i++; - } - // add entry - currentEntries.add(entry); - // AnalysisLogger.getLogger().debug("DBObjectTranslator->retrieveEntries: Entry Added " + entry.toString()); - } catch (Exception e) { - // e.printStackTrace(); - AnalysisLogger.getLogger().trace("DBObjectTranslator->retrieveEntries: Error in adding entry :" + e.getLocalizedMessage()); - } - } - } - -// AnalysisLogger.getLogger().trace("DBObjectTranslator->retrieveEntries: Entries " + currentEntries); - return currentEntries; - } - - public void buildRelationsEdges(SessionFactory dbSession) { - - String query = "select * from relation_table;"; - List resultSet = DatabaseFactory.executeSQLQuery(query, dbSession); - for (Object result : resultSet) { - Object[] resultArray = (Object[]) result; - RelationEdge re = null; - try { - re = new RelationEdge(((String) resultArray[2]), "" + resultArray[0], "" + resultArray[1]); - } catch (Exception e) { - e.printStackTrace(); - } - if (re != null) { - relations.add(re); - AnalysisLogger.getLogger().trace("DBObjectTranslator->buildRelationsEdges: add relation " + re.toString()); - } - } - } - - 
public void buildCategories(SessionFactory dbSession, String referenceTable, String referenceColumn, String idColumn, String nameHuman, String description) { - - referenceTable = referenceTable == null ? "reference_table" : referenceTable; - referenceColumn = referenceColumn == null ? "table_name" : referenceColumn; - nameHuman = nameHuman == null ? "name_human" : nameHuman; - idColumn = idColumn == null ? "id" : idColumn; - description = description == null ? "description" : description; - - String query = "SELECT " + nameHuman + "," + idColumn + "," + referenceColumn + "," + description + " FROM " + referenceTable + " r;"; - List resultSet = DatabaseFactory.executeSQLQuery(query, dbSession); - if (resultSet != null) { - for (Object result : resultSet) { - Object[] resultArray = (Object[]) result; - Category cat = null; - try { - // name_human, id, table_name,description - cat = new Category("" + resultArray[0], "" + resultArray[1], "" + resultArray[2], "" + resultArray[3]); - } catch (Exception e) { - e.printStackTrace(); - } - if (cat != null) { - categories.add(cat); - AnalysisLogger.getLogger().trace("DBObjectTranslator->buildCategories: add category " + cat.toString()); - } - } - } - } - - public Category getCategoryfromIndex(String index) { - - Category cat = null; - for (Category c : categories) { - - if (c.getIndex().equals(index)) { - cat = c; - break; - } - } - - return cat; - } - - public void populateRelationWithCategories() { - - for (RelationEdge re : relations) { - - Category from = getCategoryfromIndex(re.getFrom()); - Category to = getCategoryfromIndex(re.getTo()); - re.setCategoryFrom(from.getName()); - re.setCategoryTo(to.getName()); - AnalysisLogger.getLogger().trace("DBObjectTranslator->populateRelationWithCategories: modified Relation " + re.toString()); - } - } - - public void calculateRelationWeights(SessionFactory dbSession) { - - for (RelationEdge re : relations) { - - String query = "SELECT count(*) FROM " + re.getName().toLowerCase(); - 
- List resultSet = DatabaseFactory.executeSQLQuery(query, dbSession); - for (Object result : resultSet) { - - try { - BigInteger resultcount = (BigInteger) result; - re.setWeigth(resultcount); - totalRelationElements = totalRelationElements.add(resultcount); - AnalysisLogger.getLogger().trace("DBObjectTranslator->calculateRelationWeights: Relation " + re.getName() + " weight " + re.getWeigth()); - } catch (Exception e) { - } - } - } - } - - public void calculateCategoriesWeights(SessionFactory dbSession) { - - for (Category cat : categories) { - - String query = "SELECT count(*) FROM " + cat.getTableName().toLowerCase(); - - List resultSet = DatabaseFactory.executeSQLQuery(query, dbSession); - - for (Object result : resultSet) { - - try { - BigInteger resultcount = (BigInteger) result; - cat.setNumberOfElements(resultcount); - totalCatElements = totalCatElements.add(resultcount); - AnalysisLogger.getLogger().trace("DBObjectTranslator->calculateCategoriesWeights: Category " + cat.getName() + " weight " + cat.getNumberOfElements() + " total " + totalCatElements); - } catch (Exception e) { - } - } - } - } - - public void buildCategoriesStructure(SessionFactory dbSession, String referenceTable, String referenceColumn, String idColumn, String nameHuman, String description) { - buildCategories(dbSession, referenceTable, referenceColumn, idColumn, nameHuman, description); - calculateCategoriesWeights(dbSession); - AnalysisLogger.getLogger().trace("DBObjectTranslator->buildWholeStructure: Total Categories Elements " + totalCatElements + " Total Relation Elements " + totalRelationElements); - } - - public void buildWholeStructure(SessionFactory dbSession, String referenceTable, String referenceColumn, String idColumn, String nameHuman, String description) { - - buildRelationsEdges(dbSession); - buildCategories(dbSession, referenceTable, referenceColumn, idColumn, nameHuman, description); - populateRelationWithCategories(); - calculateRelationWeights(dbSession); - 
calculateCategoriesWeights(dbSession); - - AnalysisLogger.getLogger().trace("DBObjectTranslator->buildWholeStructure: Total Categories Elements " + totalCatElements + " Total Relation Elements " + totalRelationElements); - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/Entry.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/Entry.java deleted file mode 100644 index 3eafcfc..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/Entry.java +++ /dev/null @@ -1,49 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data; - -import java.util.HashMap; - -//a single entry from a category -public class Entry { - - HashMap attributes; - HashMap types; - - public HashMap getAttributes(){ - return attributes; - } - - public HashMap getTypes(){ - return types; - } - - public void addAttribute(String column,String value){ - if (value==null) - value = ""; - - attributes.put(column, value); - } - - public void addType(String column,String value){ - if (value==null) - value = ""; - - types.put(column, value); - } - - public Entry(){ - attributes = new HashMap(); - types = new HashMap(); - } - - public String toString(){ - - StringBuffer returningString = new StringBuffer(); - returningString.append("{"); - for (String att: attributes.keySet()){ - String value = attributes.get(att); - returningString.append(att+"="+value+"|"+types.get(att).toUpperCase()+"; "); - } - returningString.append("}"); - return returningString.toString(); - } -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/RelationEdge.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/RelationEdge.java deleted file mode 100644 index 98035e4..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/RelationEdge.java +++ /dev/null @@ -1,71 +0,0 @@ -package 
org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data; - -import java.math.BigInteger; - -public class RelationEdge { - - /** - * @param args - */ - public static void main(String[] args) { - // TODO Auto-generated method stub - - } - - private String relationName; - private String indexFrom; - private String indexTo; - private BigInteger weight; - - private String categoryFrom; - private String categoryTo; - - - public BigInteger getWeigth(){ - return weight; - } - - public void setWeigth(BigInteger Weight){ - weight = Weight; - } - - public String getTo(){ - return indexTo; - } - public String getFrom(){ - return indexFrom; - } - public String getName(){ - return relationName; - } - public void setName(String name){ - relationName = name; - } - - public RelationEdge(String name,String from,String to){ - relationName = name; - indexFrom = from; - indexTo = to; - } - @Override - public String toString(){ - return "["+relationName+": from "+indexFrom+" to " +indexTo+" nameFrom "+categoryFrom+" nameTo "+categoryTo+"]"; - } - - public void setCategoryFrom(String categoryFrom) { - this.categoryFrom = categoryFrom; - } - - public String getCategoryFrom() { - return categoryFrom; - } - - public void setCategoryTo(String categoryTo) { - this.categoryTo = categoryTo; - } - - public String getCategoryTo() { - return categoryTo; - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/SingleResult.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/SingleResult.java deleted file mode 100644 index 2689ce5..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/SingleResult.java +++ /dev/null @@ -1,65 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data; - - -public class SingleResult { - private String category; - private String column; - - private String tablename; - private String familyID; - - private double score; - - public void setCategory(String 
category) { - this.category = category; - } - public String getCategory() { - return category; - } - public void setColumn(String column) { - this.column = column; - } - public String getColumn() { - return column; - } - public void setScore(double score) { - this.score = score; - } - public double getScore() { - return score; - } - - public String getStringScore() { - double scored = Math.round((int)(score*100))/(double)100; - - return ""+scored; - } - - public String toString(){ - double scored = Math.round((int)(score*100))/(double)100; - if (column!=null) - return category+"="+column+":"+scored+" tab:"+tablename+":"+familyID; - else - return category+"="+":"+scored; - } - - public SingleResult (String Category,String Column,double Score, String TableName,String FamilyID){ - category = Category; - column = Column; - score = Score; - tablename = TableName; - familyID = FamilyID; - } - public void setTablename(String tablename) { - this.tablename = tablename; - } - public String getTablename() { - return tablename; - } - public void setFamilyID(String familyID) { - this.familyID = familyID; - } - public String getFamilyID() { - return familyID; - } -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/TSObjectTransformer.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/TSObjectTransformer.java deleted file mode 100644 index 0cb9ab0..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/data/TSObjectTransformer.java +++ /dev/null @@ -1,80 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data; - -import java.math.BigDecimal; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.graph.GraphFramer; - - - -public class TSObjectTransformer { - - /** - * @param args - */ - public static void main(String[] args) { - // TODO Auto-generated 
method stub - - } - - public static CategoryOrderedList transform2List(DBObjectTranslator dbo, LexicalEngineConfiguration config){ - return transform2List(dbo,config,null); - } - - public static CategoryOrderedList transform2List(DBObjectTranslator dbo, LexicalEngineConfiguration config, String filter){ - CategoryOrderedList col = new CategoryOrderedList(config); - for (Category cat:dbo.categories){ - if ((filter==null) || filter.equalsIgnoreCase(cat.getName())) - col.addCategory(cat); - } - return col; - } - - - - public static void transform2Graph(DBObjectTranslator dbo){ - - GraphFramer starter = new GraphFramer("Time Series Graph"); - BigDecimal total = new BigDecimal(dbo.totalCatElements); -// total = new BigDecimal(100).divide(total,2,BigDecimal.ROUND_HALF_UP); - for (Category cat:dbo.categories){ - - BigDecimal bd = new BigDecimal(cat.getNumberOfElements()); - - bd = bd.divide(total,4,BigDecimal.ROUND_HALF_UP); - bd = bd.multiply(new BigDecimal(100)); - bd = bd.setScale(2,BigDecimal.ROUND_HALF_UP); -// double perc = bd.doubleValue()*100; - - String builtname = cat.getName()+":"+bd+"% "; - - starter.graphDisplayer.addVertex(builtname); - } - for (RelationEdge rel:dbo.relations){ - Category cat = dbo.getCategoryfromIndex(rel.getFrom()); - BigDecimal bd = new BigDecimal(cat.getNumberOfElements()); - bd = bd.divide(total,4,BigDecimal.ROUND_HALF_UP); - bd = bd.multiply(new BigDecimal(100)); - bd = bd.setScale(2,BigDecimal.ROUND_HALF_UP); -// double perc = bd.doubleValue()*100; - - String name1 = cat.getName()+":"+bd+"% "; - - cat = dbo.getCategoryfromIndex(rel.getTo()); - bd = new BigDecimal(cat.getNumberOfElements()); - bd = bd.divide(total,4,BigDecimal.ROUND_HALF_UP); - bd = bd.multiply(new BigDecimal(100)); - bd = bd.setScale(2,BigDecimal.ROUND_HALF_UP); -// perc = bd.doubleValue()+100; - - String name2 = cat.getName()+":"+bd+"% "; - starter.graphDisplayer.addEdge(name1,name2,new BigDecimal(rel.getWeigth()).divide(new 
BigDecimal(dbo.totalCatElements),2,BigDecimal.ROUND_HALF_UP).multiply(new BigDecimal(100)).doubleValue()); -// starter.graphDisplayer.addEdge(name1,name2,0); - } - -// starter.graphDisplayer.generateRandomGraph(); - starter.graphDisplayer.generateUpTo5StarGraph(); - - starter.go(); - } -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/interfaces/Reference.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/interfaces/Reference.java deleted file mode 100644 index d599f5b..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/interfaces/Reference.java +++ /dev/null @@ -1,19 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.interfaces; - -import java.math.BigInteger; - -public interface Reference { - - public void setName(String categoryName); - public String getName(); - public void setIndex(String categoryIndex); - public String getIndex(); - public void setTableName(String tableName); - public String getTableName(); - public void setDescription(String description); - public String getDescription(); - public String toString(); - public void setNumberOfElements(BigInteger numberOfElements); - public BigInteger getNumberOfElements(); - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/Chunk.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/Chunk.java deleted file mode 100644 index 6720266..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/Chunk.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine; - -public abstract class Chunk { - - - protected Engine engine; - - public Chunk(Engine engine){ - this.engine = engine; - } - -} diff --git 
a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/ChunkSet.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/ChunkSet.java deleted file mode 100644 index ed849ad..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/ChunkSet.java +++ /dev/null @@ -1,128 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks; - - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.util.ArrayList; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions; - -public abstract class ChunkSet { - - - protected String seriesName; - protected String seriesColumn; - protected int chunkSize; - private int maxNumberOfChunks; - ArrayList chunkSet; - protected int chunkSetSize; - protected BigInteger numberOfEntries; - protected int chunkIndex; - protected LexicalEngineConfiguration config; - protected Engine engine; - - public ChunkSet(int MaxNumberOfChunks, int ChunkSize, String SeriesName,String SeriesColumn, LexicalEngineConfiguration Config, Engine engine) throws Exception{ - this.engine = engine; - config = Config; - setSeriesName(SeriesName); - setSeriesColumn(SeriesColumn); - setChunkSize(ChunkSize); - maxNumberOfChunks = MaxNumberOfChunks; - generateChunkSet(); - - } - - public ChunkSet(int MaxNumberOfChunks, int ChunkSize, String SeriesName,String SeriesColumn,BigInteger numberOfEntries,LexicalEngineConfiguration Config , Engine engine) throws Exception{ - this.engine = engine; - config = Config; - setSeriesName(SeriesName); - setSeriesColumn(SeriesColumn); - setChunkSize(ChunkSize); - setNumberOfEntries(numberOfEntries); - maxNumberOfChunks = 
MaxNumberOfChunks; - generateChunkSet(); - - - } - - - - public void generateChunkSet() throws Exception{ - - AnalysisLogger.getLogger().trace("ChunkSet->generateChunkSet-> \tGenerating Chunk Set for " + seriesName+ " "+seriesColumn); - int numberOfChunks = calculateNumberOfCycles(); - //generate chunks to be processed - chunkSet = MathFunctions.generateRandoms(maxNumberOfChunks, 0, numberOfChunks); - chunkIndex = 0; - chunkSetSize = numberOfChunks; - } - - - abstract protected BigDecimal calculateNumberOfElements() throws Exception; - - - protected int calculateNumberOfCycles() throws Exception { - - int numberofcycles = 0; - - // calculate total entries in the time series - BigDecimal numberOfElements = calculateNumberOfElements(); - // calculate total cycles of comparison - BigDecimal intcycles; - BigDecimal oddcycles; - BigDecimal[] arraydecimal = numberOfElements.divideAndRemainder(new BigDecimal(BigInteger.valueOf(chunkSize))); - intcycles = arraydecimal[0]; - oddcycles = arraydecimal[1]; - numberofcycles = intcycles.intValue(); - if ((numberofcycles==0)&&(oddcycles.intValue() > 0)) numberofcycles = numberofcycles + 1; - - return numberofcycles; - - } - - public void setSeriesName(String seriesName) { - this.seriesName = seriesName; - } - - - public String getSeriesName() { - return seriesName; - } - - - public void setSeriesColumn(String seriesColumn) { - this.seriesColumn = seriesColumn; - } - - - public String getSeriesColumn() { - return seriesColumn; - } - - - public void setChunkSize(int chunkSize) { - this.chunkSize = chunkSize; - } - - - public int getChunkSize() { - return chunkSize; - } - - public void setNumberOfEntries(BigInteger numberOfEntries) { - this.numberOfEntries = numberOfEntries; - } - - public BigInteger getNumberOfEntries() { - return numberOfEntries; - } - - - - abstract public Object nextChunk(); - - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/ReferenceChunk.java 
b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/ReferenceChunk.java deleted file mode 100644 index 1f2ad15..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/ReferenceChunk.java +++ /dev/null @@ -1,55 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks; - - -import java.math.BigInteger; -import java.util.ArrayList; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.DBObjectTranslator; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.Entry; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.hibernate.SessionFactory; - -public class ReferenceChunk extends Chunk{ - - - - private String categoryName; - private String categoryTableName; - private ArrayList referenceEntries; - - private BigInteger startPoint; - private int chunkSize; - - public ReferenceChunk(String CategoryName, String CategoryTableName, BigInteger StartPoint, int ChunkSize, Engine engine){ - super(engine); - chunkSize = ChunkSize; - categoryName = CategoryName; - categoryTableName = CategoryTableName; - startPoint = StartPoint; - AnalysisLogger.getLogger().trace("ReferenceChunk-> \t\tTOOK CATEGORY CHUNK FOR CATEGORY: " + categoryName+" - index : "+startPoint); - } - - - //takes references on demand from DB - public ArrayList getReferenceEntries() throws Exception{ - - DBObjectTranslator dbo = new DBObjectTranslator(); - SessionFactory sess = engine.getDBSession(); -// AnalysisLogger.getLogger().debug("ReferenceChunk->getReferenceEntries-> \tCATEGORY CHUNK START : " + startPoint); - referenceEntries = dbo.retrieveEntries(sess, categoryTableName, startPoint, chunkSize); - return referenceEntries; - } - - public void setCategoryName(String categoryName) { - this.categoryName = categoryName; - } - public String 
getCategoryName() { - return categoryName; - } - - - - - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/ReferenceChunkSet.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/ReferenceChunkSet.java deleted file mode 100644 index d847ebf..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/ReferenceChunkSet.java +++ /dev/null @@ -1,51 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks; - - -import java.math.BigDecimal; -import java.math.BigInteger; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions; - -public class ReferenceChunkSet extends ChunkSet{ - - - public ReferenceChunkSet(int MaxNumberOfChunks, int ChunkSize, String CategoryName,String CategoryColumn, LexicalEngineConfiguration config, Engine engine) throws Exception{ - super(MaxNumberOfChunks, ChunkSize, CategoryName,CategoryColumn, config, engine); - } - - public ReferenceChunkSet(int MaxNumberOfChunks, int ChunkSize, String CategoryName, String CategoryTable, BigInteger numberOfCategoryElements, LexicalEngineConfiguration config, Engine engine) throws Exception{ - super(MaxNumberOfChunks, ChunkSize, CategoryName, CategoryTable, numberOfCategoryElements, config, engine); - } - - protected BigDecimal calculateNumberOfElements() throws Exception{ - // calculate total entries in the time series - BigDecimal numberOfElements = new BigDecimal(numberOfEntries); - return numberOfElements; - } - - - public ReferenceChunk nextChunk() { - - ReferenceChunk rc = null; - - while (!chunkSet.contains(chunkIndex) && (chunkIndex < chunkSetSize)) { - chunkIndex++; - } - if (chunkIndex < chunkSetSize) { - BigInteger startIndex = 
MathFunctions.chunk2Index(chunkIndex, chunkSize); - try { - rc = new ReferenceChunk(seriesName, seriesColumn , startIndex, chunkSize, engine); - } catch (Exception e) { - e.printStackTrace(); - } - } - - chunkIndex++; - return rc; - - } - - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/SetOfReferenceChunkSet.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/SetOfReferenceChunkSet.java deleted file mode 100644 index a2020af..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/SetOfReferenceChunkSet.java +++ /dev/null @@ -1,41 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks; - - -import java.util.ArrayList; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.interfaces.Reference; - -public class SetOfReferenceChunkSet { - - ArrayList orderedList; - int referenceIndex; - LexicalEngineConfiguration config; - Engine engine; - - public SetOfReferenceChunkSet(ArrayList OrderedList, LexicalEngineConfiguration Config, Engine engine){ - - this.engine = engine; - orderedList = OrderedList; - referenceIndex = 0; - config = Config; - } - - //filter selects only one of the categories - public ReferenceChunkSet getNextChunkSet(){ - ReferenceChunkSet cs = null; - if (orderedList.size()>referenceIndex){ - Reference ref = orderedList.get(referenceIndex); - try{ - cs = new ReferenceChunkSet(config.ReferenceChunksToTake,config.chunkSize,ref.getName(),ref.getTableName(),ref.getNumberOfElements(),config, engine); - }catch (Exception e){ - e.printStackTrace(); - } - referenceIndex++; - } - - return cs; - } - -} diff --git 
a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/SingletonChunkSet.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/SingletonChunkSet.java deleted file mode 100644 index fa98f73..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/SingletonChunkSet.java +++ /dev/null @@ -1,52 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks; - - -import java.math.BigDecimal; -import java.math.BigInteger; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions; - - -public class SingletonChunkSet extends ChunkSet { - - private String singletonString; - private String ColumnType; - public SingletonChunkSet(String SingletonString, LexicalEngineConfiguration config, Engine engine) throws Exception { - super(1, 1, null, null, config, engine); - singletonString = SingletonString; - ColumnType = null; - } - - protected BigDecimal calculateNumberOfElements() throws Exception { - // calculate total entries in the time series - BigDecimal numberOfElements = BigDecimal.ONE; - return numberOfElements; - } - - public TimeSeriesChunk nextChunk() { - - TimeSeriesChunk tsc = null; - - while (!chunkSet.contains(chunkIndex) && (chunkIndex < chunkSetSize)) { - chunkIndex++; - } - if (chunkIndex < chunkSetSize) { - BigInteger startIndex = MathFunctions.chunk2Index(chunkIndex, chunkSize); - - try { - tsc = new TimeSeriesChunk(singletonString, ColumnType, startIndex, chunkSize, config, engine); - if (ColumnType == null) { - ColumnType = tsc.getColumnType(); - } - } catch (Exception e) { - e.printStackTrace(); - } - } - chunkIndex++; - return tsc; - - } - -} diff --git 
a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/TimeSeriesChunk.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/TimeSeriesChunk.java deleted file mode 100644 index 86c816a..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/TimeSeriesChunk.java +++ /dev/null @@ -1,167 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks; - - -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.HashMap; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.DataTypeRecognizer; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.CategoryScores; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.DBObjectTranslator; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.Entry; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.contentmanagement.lexicalmatcher.utils.DistanceCalculator; -import org.hibernate.SessionFactory; - -public class TimeSeriesChunk extends Chunk{ - - - private ArrayList columnEntries; - private String columnType; - private LexicalEngineConfiguration config; - private boolean mustInterrupt; - private ArrayList detailedResults; - private String singletonElement; - private boolean isSingleton; - - public String getColumnType(){ - return columnType; - } - - public String getSingletonEntry(){ - return singletonElement; - } - - public ArrayList getDetailedResults(){ - return detailedResults; - } - public boolean isSingleton(){ - return isSingleton; - } - - public TimeSeriesChunk(String timeSeriesName, String 
timeSeriesColumn, String ColumnType, BigInteger start, int ChunkSize, LexicalEngineConfiguration Config, Engine engine) throws Exception{ - super(engine); - DBObjectTranslator dbo = new DBObjectTranslator(); - SessionFactory sess = engine.getDBSession(); - columnEntries = dbo.retrieveTimeSeriesEntries(sess, timeSeriesName, timeSeriesColumn, start, ChunkSize); - if (ColumnType==null){ - columnType = DataTypeRecognizer.guessType(columnEntries); - AnalysisLogger.getLogger().trace("TimeSeriesChunk-> GUESSED TYPE " + columnType + " FOR COLUMN "+timeSeriesColumn); - } - mustInterrupt = false; - config = Config; - isSingleton = false; - } - - public TimeSeriesChunk(String singletonString, String ColumnType, BigInteger start, int ChunkSize, LexicalEngineConfiguration Config, Engine engine) throws Exception{ - super(engine); - columnEntries = new ArrayList(); - columnEntries.add(singletonString); - if (ColumnType==null){ - columnType = DataTypeRecognizer.guessType(columnEntries); - AnalysisLogger.getLogger().trace("TimeSeriesChunk-> GUESSED TYPE " + columnType + " FOR SINGLETON "+singletonString); - } - mustInterrupt = false; - config = Config; - isSingleton = true; - singletonElement = singletonString; - detailedResults = new ArrayList(); - } - - - - public boolean mustInterruptProcess (){ - return this.mustInterrupt; - } - public void compareToReferenceChunk(HashMap scoresTable, ReferenceChunk catChunk) throws Exception { - compareToReferenceChunk(scoresTable, catChunk,null); - } - - // checks an entry set against a reference set - // columnEntries: column elements from unknown column - // cat: category analyzed for candidating to recognized - // referenceEntries: some elements belonging to cat, to be compared to columnEntries - public void compareToReferenceChunk(HashMap scoresTable, ReferenceChunk catChunk,String ColumnFilter) throws Exception { - - - //in the case of a singleton Chunk interrupt computation in case of exact match - - // get category Score for further 
processing - CategoryScores categoryScores = scoresTable.get(catChunk.getCategoryName()); - //extract Entries from DB - ArrayList categoryEntries = catChunk.getReferenceEntries(); - - for (String timeSeriesElement : columnEntries) { - // for each reference entry - for (Entry referenceEntry : categoryEntries) { - - // take all attributes of a reference entry for confrontation to columns - HashMap attributes = referenceEntry.getAttributes(); - HashMap types = referenceEntry.getTypes(); - boolean anotherReference= true; - - // for each attribute of an entry - for (String referenceColumn : attributes.keySet()) { - - // perform calculation only if the column type is the same - if (types.get(referenceColumn).equals(columnType)&&((ColumnFilter==null)||(ColumnFilter.equalsIgnoreCase(referenceColumn)))) { -// AnalysisLogger.getLogger().debug("CategoryOrderedList->checkAllEntriesOnEntireCategory-> REFERENCE COLUMN "+referenceColumn+" HAS TYPE "+types.get(referenceColumn)); - // take the attribute value of the entry - String attribute = attributes.get(referenceColumn); - // calculate the distance between the unknown entry and the attribute - DistanceCalculator d = new DistanceCalculator(); - double percentage = d.CD(config.useSimpleDistance, timeSeriesElement, attribute, isSingleton, isSingleton) * 100f; -// AnalysisLogger.getLogger().debug("CategoryOrderedList->checkUnkEntriesOnEntireCategory-> Percentage between " +timeSeriesElement + " and " + attribute + " is: "+percentage ); - // if they are similar - if (percentage > config.entryAcceptanceThreshold) { -// if (catChunk.getCategoryName().equals("COUNTRY_OLD")) - AnalysisLogger.getLogger().trace("TimeSeriesChunk->compareToCategoryChunk-> \t\tPercentage between " + timeSeriesElement + " vs. 
" + attribute + " is: " + percentage+" in "+catChunk.getCategoryName()+":"+referenceColumn); - - categoryScores.incrementScore(referenceColumn, (float)percentage,anotherReference); - - //if we are in a singleton we have to get the details - if (isSingleton){ - //for singleton match, fulfil details - int index =0; - for (SingleResult sr :detailedResults){ - - Double scoredetail = sr.getScore(); - - if (scoredetailcompareToCategoryChunk-> "+categoryScores.showScores()); - } - //if exact match is reached, exit - if ((percentage==100)&&(isSingleton)) - { - detailedResults = new ArrayList(); - detailedResults.add(new SingleResult(attribute, null, percentage,null,"0")); - mustInterrupt = true; - break; - } - } - } - - }// end for on columns - - if (mustInterrupt) - break; - - }// end for on entries - } - } - - - - - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/TimeSeriesChunkSet.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/TimeSeriesChunkSet.java deleted file mode 100644 index f165742..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/chunks/TimeSeriesChunkSet.java +++ /dev/null @@ -1,53 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.chunks; - - -import java.math.BigDecimal; -import java.math.BigInteger; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.DBObjectTranslator; -import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions; -import org.hibernate.SessionFactory; - -public class TimeSeriesChunkSet extends ChunkSet { - - private String ColumnType; - - public TimeSeriesChunkSet(int MaxNumberOfChunks, int ChunkSize, String TimeSeriesName, String TimeSeriesColumn, 
LexicalEngineConfiguration config, Engine engine) throws Exception { - super(MaxNumberOfChunks, ChunkSize, TimeSeriesName, TimeSeriesColumn, config,engine); - ColumnType = null; - } - - protected BigDecimal calculateNumberOfElements() throws Exception { - // calculate total entries in the time series - DBObjectTranslator dbo = new DBObjectTranslator(); - SessionFactory sess = engine.getDBSession(); - BigDecimal numberOfElements = new BigDecimal(dbo.calculateTotalEntries(sess, seriesName, seriesColumn)); - return numberOfElements; - } - - public TimeSeriesChunk nextChunk() { - - TimeSeriesChunk tsc = null; - - while (!chunkSet.contains(chunkIndex) && (chunkIndex < chunkSetSize)) { - chunkIndex++; - } - if (chunkIndex < chunkSetSize) { - BigInteger startIndex = MathFunctions.chunk2Index(chunkIndex, chunkSize); - try { - tsc = new TimeSeriesChunk(seriesName, seriesColumn, ColumnType, startIndex, chunkSize, config, engine); - if (ColumnType == null) { - ColumnType = tsc.getColumnType(); - } - } catch (Exception e) { - e.printStackTrace(); - } - } - chunkIndex++; - return tsc; - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/CustomListenableDirectedWeightedGraph.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/CustomListenableDirectedWeightedGraph.java deleted file mode 100644 index bdda6ad..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/CustomListenableDirectedWeightedGraph.java +++ /dev/null @@ -1,25 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.graph; - -import org.jgrapht.graph.ListenableDirectedWeightedGraph; - -public class CustomListenableDirectedWeightedGraph extends ListenableDirectedWeightedGraph{ - - - public CustomListenableDirectedWeightedGraph(Class arg0) { - super(arg0); - } - - public void setEdgeWeight(E e, double weight) { - super.setEdgeWeight(e, weight); - 
- ((CustomWeightedEdge)e).setWeight(weight); - } - - public E addEdge(V o1,V o2) { - E out = super.addEdge(o1,o2); - ((CustomWeightedEdge)out).setEdges(o1,o2); - - return out; - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/CustomWeightedEdge.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/CustomWeightedEdge.java deleted file mode 100644 index 3aa5ac1..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/CustomWeightedEdge.java +++ /dev/null @@ -1,27 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.graph; - -import org.jgrapht.graph.DefaultWeightedEdge; - -import com.touchgraph.graphlayout.Edge; - -public class CustomWeightedEdge extends DefaultWeightedEdge{ - - @Override - public String toString(){ - return "["+o1+":"+o2+":"+weight+"%]"; - } - - private double weight; - private Object o1; - private Object o2; - - public void setWeight(double weight){ - this.weight = weight; - } - - public void setEdges(Object o1,Object o2){ - this.o1=o1; - this.o2=o2; - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/CustomWeightedVertex.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/CustomWeightedVertex.java deleted file mode 100644 index 9810ccd..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/CustomWeightedVertex.java +++ /dev/null @@ -1,36 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.graph; - -import org.jgrapht.graph.DefaultWeightedEdge; - -import com.touchgraph.graphlayout.Edge; - -public class CustomWeightedVertex { - - @Override - public String toString() { - - return "[" + name + ":" + weight + "%]"; - } - - private double weight; - private String name; - - public CustomWeightedVertex(String 
name, double weight) { - this.weight = weight; - this.name = name; - } - - public CustomWeightedVertex(String name) { - this.weight = 0; - this.name = name; - } - - public boolean equals(CustomWeightedVertex v) { - - if (v.name.equals(name)) - return true; - else - return false; - - } -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/GraphDisplayer.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/GraphDisplayer.java deleted file mode 100644 index 8df91c1..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/GraphDisplayer.java +++ /dev/null @@ -1,299 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.graph; - - -import java.awt.Color; -import java.awt.Dimension; -import java.awt.Rectangle; -import java.awt.geom.Rectangle2D; -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; -import java.util.Random; - -import javax.swing.JApplet; - -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.jgraph.JGraph; -import org.jgraph.graph.DefaultGraphCell; -import org.jgraph.graph.GraphConstants; -import org.jgrapht.ext.JGraphModelAdapter; - -public class GraphDisplayer extends JApplet { - private static final Color DEFAULT_BG_COLOR = Color.decode("#FAFBFF"); - private static final Dimension DEFAULT_SIZE = new Dimension(530, 320); - - private JGraphModelAdapter m_jgAdapter; - - public static int WIDTH = 1000; - public static int HEIGHT = 800; - - public static int WIDTHBOX = 1280; - public static int HEIGHTBOX = 1024; - - private int newxposition; - private int newyposition; - - private CustomListenableDirectedWeightedGraph g; - private int nodesCounter; - private static final int minx = 10; - private static final int miny = 10; - ArrayList VertexNames; - HashMap Edges; - - public void generatePosition(int 
lastxPosition, int lastyposition) { - - int rangex = (int) WIDTH - (int) lastxPosition; - // compute a fraction of the range, 0 <= frac < range - Random a = new Random(); - int newx = lastxPosition + 70 + (int) (rangex * a.nextDouble()); - int epsilon = 1; - int newy = (int) lastyposition + (int) (epsilon * 20f * Math.random()); - if (newx > WIDTH) - newx = WIDTH - 100; - if (newx < lastxPosition - 90) - newx = lastxPosition + 90; - if (newy > HEIGHT) - newy = HEIGHT - 10; - if (newy < 0) - newy = 0; - newxposition = newx; - newyposition = newy; - // System.out.println("LAST X "+lastxPosition+" NEW X "+newxposition); - // System.out.println("LAST Y "+lastyposition+" NEW Y "+newyposition); - } - - public void init() { - AnalysisLogger.getLogger().debug("INIZIALIZZATO!"); - - JGraph jgraph = new JGraph(m_jgAdapter); - - adjustDisplaySettings(jgraph); - getContentPane().add(jgraph); - resize(DEFAULT_SIZE); - - AnalysisLogger.getLogger().debug("RESIZED!"); - } - - public void generateGraph() { - - for (String v : VertexNames) { - genPositionVertex(v); - } - } - - public void generateRandomGraph() { - - for (String v : VertexNames) { - int randx = minx + (int) ((WIDTH - 100) * Math.random()); - int randy = miny + (int) ((HEIGHT - 100) * Math.random()); - positionVertexAt(v, randx, randy); - } - } - - public void generateUpTo5StarGraph() { - - // individua le star - HashMap vertexFrequencies = new HashMap(); - // calcolo le frequenze dei vertici - for (String edge : Edges.values()) { - System.out.println(edge + "-" + vertexFrequencies.get(edge)); - if (vertexFrequencies.get(edge) != null) { - int f = vertexFrequencies.get(edge).intValue(); - vertexFrequencies.put(edge, new Integer(f + 1)); - } else - vertexFrequencies.put(edge, new Integer(0)); - - } - - for (String vertex : VertexNames) { - - if (Edges.get(vertex) == null) { - boolean trovato = false; - // cerco ogni vertice tra gli archi - for (String starvertex : Edges.values()) { - if (vertex.equals(starvertex)) { - 
trovato = true; - break; - } - } - if (!trovato) { - System.out.println("aggiunto vertice isolato " + vertex); - vertexFrequencies.put(vertex, new Integer(0)); - } - } - - } - - System.out.println("FEQS " + vertexFrequencies.toString()); - // ordino le star - ArrayList starList = new ArrayList(); - for (String vertex : vertexFrequencies.keySet()) { - - int freq = vertexFrequencies.get(vertex); - int i = 0; - boolean trovato = false; - for (String element : starList) { - - int referfreq = vertexFrequencies.get(element); - if (referfreq < freq) { - starList.add(i, vertex); - trovato = true; - break; - } - i++; - } - if (!trovato) - starList.add(vertex); - } - - // dispongo le star nel layout - System.out.println(starList.toString()); - int bound = 200; - int[] boundedXIndexex = { bound, WIDTH - bound, bound, WIDTH - bound, WIDTH / 2 }; - int[] boundedYIndexex = { bound, bound, HEIGHT - bound, HEIGHT - bound, HEIGHT / 2 }; - int sizeStar = starList.size(); - // int sizeStar = 1; - - // distribuisco le star sul grafico - for (int i = 0; i < sizeStar; i++) { - - positionVertexAt(starList.get(i), boundedXIndexex[i], boundedYIndexex[i]); - - // calcolo il numero di elementi della stella - int countelems = 0; - for (String edge : Edges.keySet()) { - if (Edges.get(edge).equals(starList.get(i))) { - countelems++; - } - } - - if (countelems > 0) { - double subdivision = 360 / countelems; - double angle = 105f; - double radius = 200f; - System.out.println("Numero di elementi nella stella: " + countelems + " suddivisioni: " + subdivision); - for (String edge : Edges.keySet()) { - // dispongo gli elementi a stella - if (Edges.get(edge).equals(starList.get(i))) { - int currentx = boundedXIndexex[i]; - int currenty = boundedYIndexex[i]; - int epsilonx = (int) (radius * Math.cos(Math.toRadians(angle))); - int epsilony = (int) (radius * Math.sin(Math.toRadians(angle))); - System.out.println("angolo attuale: " + angle + " x0: " + currentx + " y0 " + currenty + " ex " + epsilonx + " 
ey " + epsilony); - positionVertexAt(edge, currentx + epsilonx, currenty + epsilony); - - angle += subdivision; - } - } - } - - } - - } - - private void genPositionVertex(String vertexName) { - - if (nodesCounter > 0) { - if ((nodesCounter % 2) == 0) { - newxposition = 10 + (int) (20f * Math.random()); - newyposition += 100; - } else - generatePosition(newxposition, newyposition); - } - - positionVertexAt(vertexName, newxposition, newyposition); - nodesCounter++; - } - - public GraphDisplayer() { - g = new CustomListenableDirectedWeightedGraph(CustomWeightedEdge.class); - m_jgAdapter = new JGraphModelAdapter(g); - VertexNames = new ArrayList(); - Edges = new HashMap(); - newxposition = minx; - newyposition = miny; - nodesCounter = 0; - } - - public void addVertex(String name) { - g.addVertex(name); - VertexNames.add(name); - } - - public void addEdge(String v1, String v2, double bi) { - CustomWeightedEdge ed = (CustomWeightedEdge)g.addEdge(v1,v2); - g.setEdgeWeight(ed,bi); - Edges.put(v1, v2); - } - - private void adjustDisplaySettings(JGraph jg) { - jg.setPreferredSize(DEFAULT_SIZE); - - Color c = DEFAULT_BG_COLOR; - String colorStr = null; - - try { - colorStr = getParameter("bgcolor"); - } catch (Exception e) { - } - - if (colorStr != null) { - c = Color.decode(colorStr); - } - - jg.setBackground(c); - } - - private void positionVertexAt(Object vertex, int x, int y) { - - // seleziono la cella chiamata vertex - DefaultGraphCell cell = m_jgAdapter.getVertexCell(vertex); - - - // recupero gli attributi della cella - Map attr = cell.getAttributes(); - // recupero i boundaries della cella - Rectangle2D b = GraphConstants.getBounds(attr); - // setto i parametri del nuovo rettangolo - GraphConstants.setBounds(attr, new Rectangle(x, y, (int) (((String)vertex).length()+50+b.getWidth()), (int) b.getHeight())); - // costruisco una nuova cella - Map cellAttr = new HashMap(); - cellAttr.put(cell, attr); - - // posiziono la cella nel grafo - m_jgAdapter.edit(cellAttr, null, 
null, null); - - } - - public void start() { - repaint(); - - } - - public static void main(String[] args) { - - GraphFramer starter = new GraphFramer("Grafo"); - - // create a visualization using JGraph, via an adapter - String nodi[] = { "ciao", "come", "stai", "oggi", "domani", "dopodomani" }; - for (String nodo : nodi) { - starter.graphDisplayer.addVertex(nodo); - } - - for (int j = 0; j < nodi.length; j++) { - int i0 = (int) (nodi.length * Math.random()); - int i1 = (int) (nodi.length * Math.random()); - System.out.println("i0: " + i0 + " i1: " + i1); - if (i0 != i1) { - starter.graphDisplayer.addEdge(nodi[i0], nodi[i1],0); - } - } - - starter.graphDisplayer.generateGraph(); - - starter.go(); - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/GraphFramer.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/GraphFramer.java deleted file mode 100644 index 9db3a9f..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/GraphFramer.java +++ /dev/null @@ -1,40 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.graph; - -import java.awt.Event; -import java.awt.Frame; - -public class GraphFramer extends Frame{ - - public GraphDisplayer graphDisplayer; - - public GraphFramer(String frameName){ - super(frameName); - graphDisplayer = new GraphDisplayer(); - add("Center",graphDisplayer); - - } - - public void go(){ - - graphDisplayer.init(); - - this.resize(GraphDisplayer.WIDTHBOX, GraphDisplayer.HEIGHTBOX); - this.show(); - graphDisplayer.start(); - - } - - public boolean HandleEvent(Event event){ - - if (event.id == Event.WINDOW_DESTROY) - - { - try - {graphDisplayer.stop(); - graphDisplayer.destroy(); - }catch(Exception e){e.printStackTrace();} - System.exit(0); - } - return false; - } -} diff --git 
a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/GraphGeneratorApplet.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/GraphGeneratorApplet.java deleted file mode 100644 index b55cd02..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/GraphGeneratorApplet.java +++ /dev/null @@ -1,106 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.graph; - -import java.awt.Color; -import java.awt.Dimension; -import java.awt.Rectangle; -import java.awt.geom.Rectangle2D; - -import java.util.HashMap; -import java.util.Map; - -import javax.swing.JApplet; -import javax.swing.JFrame; - -import org.jgraph.JGraph; -import org.jgraph.graph.AttributeMap; -import org.jgraph.graph.DefaultGraphCell; -import org.jgraph.graph.GraphConstants; - -import org.jgrapht.ListenableGraph; -import org.jgrapht.ext.JGraphModelAdapter; -import org.jgrapht.graph.ListenableDirectedGraph; -import org.jgrapht.graph.DefaultEdge; - - -public class GraphGeneratorApplet extends JApplet { - private static final Color DEFAULT_BG_COLOR = Color.decode("#FAFBFF"); - private static final Dimension DEFAULT_SIZE = new Dimension(530, 320); - - // - private JGraphModelAdapter m_jgAdapter; - - /** - * @see java.applet.Applet#init(). 
- */ - public void init() { - // create a JGraphT graph - ListenableGraph g = new ListenableDirectedGraph(DefaultEdge.class); - - // create a visualization using JGraph, via an adapter - m_jgAdapter = new JGraphModelAdapter(g); - - JGraph jgraph = new JGraph(m_jgAdapter); - - adjustDisplaySettings(jgraph); - getContentPane().add(jgraph); - resize(DEFAULT_SIZE); - - // add some sample data (graph manipulated via JGraphT) - g.addVertex("v1"); - g.addVertex("v2"); - g.addVertex("v3"); - g.addVertex("v4"); - - g.addEdge("v1", "v2"); - g.addEdge("v2", "v3"); - g.addEdge("v3", "v1"); - g.addEdge("v4", "v3"); - - - - // position vertices nicely within JGraph component - positionVertexAt("v1", 130, 40); - positionVertexAt("v2", 60, 200); - positionVertexAt("v3", 310, 230); - positionVertexAt("v4", 380, 70); - - // that's all there is to org.gcube.contentmanagement.lexicalmatcher!... - } - - private void adjustDisplaySettings(JGraph jg) { - jg.setPreferredSize(DEFAULT_SIZE); - - Color c = DEFAULT_BG_COLOR; - String colorStr = null; - - try { - colorStr = getParameter("bgcolor"); - } catch (Exception e) { - } - - if (colorStr != null) { - c = Color.decode(colorStr); - } - - jg.setBackground(c); - } - - private void positionVertexAt(Object vertex, int x, int y) { - - - //seleziono la cella chiamata vertex - DefaultGraphCell cell = m_jgAdapter.getVertexCell(vertex); - //recupero gli attributi della cella - Map attr = cell.getAttributes(); - //recupero i boundaries della cella - Rectangle2D b = GraphConstants.getBounds(attr); - //setto i parametri del nuovo rettangolo - GraphConstants.setBounds(attr, new Rectangle(x, y, (int)b.getWidth(), (int)b.getHeight())); - //costruisco una nuova cella - Map cellAttr = new HashMap(); - cellAttr.put(cell, attr); - //posiziono la cella nel grafo - m_jgAdapter.edit(cellAttr, null, null, null); - - } -} \ No newline at end of file diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/RelationEdge.java 
b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/RelationEdge.java deleted file mode 100644 index a62c010..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/RelationEdge.java +++ /dev/null @@ -1,73 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.graph; - -import java.math.BigInteger; - -import org.jgrapht.graph.DefaultWeightedEdge; - -public class RelationEdge extends DefaultWeightedEdge{ - - /** - * @param args - */ - public static void main(String[] args) { - // TODO Auto-generated method stub - - } - - private String relationName; - private long indexFrom; - private long indexTo; - private BigInteger weight; - - private String categoryFrom; - private String categoryTo; - - - public BigInteger getWeigth(){ - return weight; - } - - public void setWeigth(BigInteger Weight){ - weight = Weight; - } - - public long getTo(){ - return indexTo; - } - public long getFrom(){ - return indexFrom; - } - public String getName(){ - return relationName; - } - public void setName(String name){ - relationName = name; - } - - public RelationEdge(String name,long from,long to){ - relationName = name; - indexFrom = from; - indexTo = to; - } - @Override - public String toString(){ - return "["+relationName+": from "+indexFrom+" to " +indexTo+" nameFrom "+categoryFrom+" nameTo "+categoryTo+"]"; - } - - public void setCategoryFrom(String categoryFrom) { - this.categoryFrom = categoryFrom; - } - - public String getCategoryFrom() { - return categoryFrom; - } - - public void setCategoryTo(String categoryTo) { - this.categoryTo = categoryTo; - } - - public String getCategoryTo() { - return categoryTo; - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/TreeExtractor.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/TreeExtractor.java deleted file mode 100644 index 
75fdfe5..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/guesser/treeStructure/graph/TreeExtractor.java +++ /dev/null @@ -1,68 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.treeStructure.graph; - -import java.util.HashSet; -import java.util.Iterator; -import java.util.Set; - -import org.hibernate.SessionFactory; - -public class TreeExtractor { - - /** - * @param args - */ - public static void main(String[] args) { - // TODO Auto-generated method stub - - } - TreeNode categoriesTree; - - //recupera l'albero delle categorie - public TreeNode getCategoriesTree(SessionFactory DB){ - return categoriesTree; - } - - //creo un nuovo Albero - public TreeExtractor(){ - categoriesTree = new TreeNode(TreeNode.ROOT); - } - - class TreeNode implements Iterable { - - public static final String ROOT = "ROOT"; - - private Set children; - public String name; - - public TreeNode(String Name) { - children = new HashSet(); - name = Name; - } - - public String getName(){ - return name; - } - - public boolean addChild(TreeNode n) { - return children.add(n); - } - - public boolean removeChild(TreeNode n) { - return children.remove(n); - } - - public Iterator iterator() { - return children.iterator(); - } - - public boolean isLeaf(){ - return ((children==null) || (children.size()==0)); - } - - public boolean isRoot(){ - return (name.equals(ROOT)); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/run/CategoryGuesser.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/run/CategoryGuesser.java deleted file mode 100644 index 12326e9..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/run/CategoryGuesser.java +++ /dev/null @@ -1,489 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.run; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.util.ArrayList; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.Engine; -import 
org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.CategoryOrderedList; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.DBObjectTranslator; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.TSObjectTransformer; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.interfaces.Reference; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.hibernate.SessionFactory; - -public class CategoryGuesser { - - /** - * @param args - * @throws Exception - */ - - private final static int MAXRESULTS = 10; - - public static void showResults(ArrayList results) { - - AnalysisLogger.getLogger().warn("CLASSIFICATION RESULT:\n"); - int i = 1; - for (SingleResult result : results) { - if (result.getColumn() != null) - AnalysisLogger.getLogger().warn(i + ": " + result.getCategory() + " - " + result.getColumn() + " ; SCORE: " + result.getStringScore() + "%"); - else - AnalysisLogger.getLogger().warn(i + ": " + result.getCategory() + " ; SCORE: " + result.getStringScore() + "%"); - - i++; - } - - } - - public static void AccuracyCalc(CategoryGuesser guesser, String configPath, String seriesName, String column, int attempts, String correctFamily, String correctColumn) throws Exception { - AccuracyCalc(null, guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - } - - public static void AccuracyCalc(LexicalEngineConfiguration externalcfg, CategoryGuesser guesser, String configPath, String seriesName, String column, int attempts, String correctFamily, String correctColumn) throws Exception { - - int familyscore = 0; - int columnscore = 0; - // CategoryGuesser guesser = new CategoryGuesser(); - - for (int i = 0; i < attempts; i++) { - - guesser.runGuesser(seriesName, column, 
externalcfg); - ArrayList results = guesser.getClassification(); - String result = results.toString(); - showResults(results); - - AnalysisLogger.getLogger().info("CLASSIFICATION RESULT " + result + " " + CategoryGuesser.resultString(result, correctFamily, correctColumn)); - - if (CategoryGuesser.CheckCompleteResult(result, correctFamily, correctColumn)) - columnscore++; - - if (CategoryGuesser.CheckFamilyResult(result, correctFamily)) - familyscore++; - - } - - double percColumn = ((double) columnscore / (double) attempts) * 100; - double percFamily = ((double) familyscore / (double) attempts) * 100; - - AnalysisLogger.getLogger().info("->ACCURACY ON FAMILY " + correctFamily + ":" + percFamily + " ACCURACY ON COLUMN " + correctColumn + ":" + percColumn); - } - - public static String resultString(String result, String family, String column) { - - result = result.toUpperCase(); - family = family.toUpperCase(); - column = column.toUpperCase(); - - return "FAMILY REC: " + result.contains(family) + " COLUMN REC: " + result.contains(family + "=" + column); - } - - public static boolean CheckCompleteResult(String result, String family, String column) { - - result = result.toUpperCase(); - family = family.toUpperCase(); - column = column.toUpperCase(); - if (result.contains(family + "=" + column)) - return true; - else - return false; - } - - public static boolean CheckFamilyResult(String result, String family) { - - result = result.toUpperCase(); - family = family.toUpperCase(); - - if (result.contains(family + "=")) - return true; - else - return false; - } - - // NOTE: The config path has to contain the two files: lexicalGuesser.properties and ALog.properties - private static final String cfgFile = "lexicalGuesser.properties"; - private static final String LogFile = "ALog.properties"; - // singleton - private CategoryOrderedList col; - private Engine processor; - private CategoryOrderedList originalCol; - private LexicalEngineConfiguration config; - private String 
configPath; - private boolean oneshotMode; - private static final int maxTriesClassification = 3; - private int triesCounter; - - public CategoryGuesser(String ConfigPath) { - - triesCounter = 0; - this.configPath = ConfigPath; - } - - public CategoryGuesser() { - triesCounter = 0; - this.configPath = "."; - } - - public void runGuesser(String seriesName, String columnName, LexicalEngineConfiguration externalConfig) throws Exception { - runGuesser(seriesName, columnName, externalConfig, null, null); - } - - public void runGuesser(String seriesName, String columnName) throws Exception { - runGuesser(seriesName, columnName, null, null, null); - } - - public void runGuesser(String seriesName, String columnName, LexicalEngineConfiguration externalConfig, String CategoryFilter, String ColumnFilter) throws Exception { - runGuesser(seriesName, columnName, externalConfig, CategoryFilter, ColumnFilter, null); - } - - public void runGuesser(String SingletonString, LexicalEngineConfiguration externalConfig, String CategoryFilter, String ColumnFilter) throws Exception { - oneshotMode = true; - runGuesser(null, null, externalConfig, CategoryFilter, ColumnFilter, SingletonString); - } - - public void init(String categoryFilter, String columnFilter, LexicalEngineConfiguration externalConfig) throws Exception { - - String cfgFileCompletePath = configPath + "/" + cfgFile; - AnalysisLogger.setLogger(configPath + "/" + LogFile); - - AnalysisLogger.getLogger().trace("******************INITIALIZING******************"); - - config = new LexicalEngineConfiguration(); - config.configure(cfgFileCompletePath); - - if (externalConfig != null) { - config.mergeConfig(externalConfig); - } - - processor = new Engine(config, columnFilter, configPath); - - SessionFactory dbSession = processor.getDBSession(config); - DBObjectTranslator dbo = new DBObjectTranslator(); - - if (col == null) { - AnalysisLogger.getLogger().trace("******************Order Category******************"); - if (externalConfig 
== null) - externalConfig = new LexicalEngineConfiguration(); - dbo.buildCategoriesStructure(dbSession, externalConfig.getReferenceTable(), externalConfig.getReferenceColumn(), externalConfig.getIdColumn(), externalConfig.getNameHuman(), externalConfig.getDescription()); - col = TSObjectTransformer.transform2List(dbo, config, categoryFilter); - AnalysisLogger.getLogger().trace("***************End Ordering********************"); - originalCol = col.generateNovelList(); - } else { - col = originalCol.generateNovelList(); - } - - oneshotMode = false; - } - - public void initSingleMatcher(LexicalEngineConfiguration externalConfig, String ColumnFilter) throws Exception { - - String cfgFileCompletePath = configPath + "/" + cfgFile; - AnalysisLogger.setLogger(configPath + "/" + LogFile); - - config = new LexicalEngineConfiguration(); - config.configure(cfgFileCompletePath); - - if (externalConfig != null) { - config.mergeConfig(externalConfig); - } - - processor = new Engine(config, ColumnFilter, configPath); - - // in this case, the lexical matcher is invoked once, then it has to be stopped in the end - oneshotMode = true; - } - - public void init(String categoryFilter, String columnFilter) throws Exception { - init(categoryFilter, columnFilter, null); - } - - public void init(LexicalEngineConfiguration externalConfig) throws Exception { - init(null, null, externalConfig); - } - - public void init() throws Exception { - init(null, null, null); - } - - public void refreshReferences() { - col = null; - } - - public void runGuesser(String seriesName, String columnName, LexicalEngineConfiguration externalConfig, String CategoryFilter, String ColumnFilter, String SingletonString) throws Exception { - - String cfgFileCompletePath = configPath + "/" + cfgFile; - AnalysisLogger.setLogger(configPath + "/" + LogFile); - - AnalysisLogger.getLogger().debug("Guessing Table " + seriesName + " column " + columnName); - if (externalConfig != null) { - config = new 
LexicalEngineConfiguration(); - config.configure(cfgFileCompletePath); - config.mergeConfig(externalConfig); - - // NOTE FOR FUTURE OPTIMIZATION: perform the re-init only if there is a change in the Database pointing - processor = new Engine(config, ColumnFilter, configPath); - } else { - if (config == null) { - config = new LexicalEngineConfiguration(); - config.configure(cfgFileCompletePath); - - } - if (processor == null) { - processor = new Engine(config, ColumnFilter, configPath); - } else - processor.resetEngine(config, ColumnFilter, configPath); - } - - SessionFactory dbSession = processor.getDBSession(config); - DBObjectTranslator dbo = new DBObjectTranslator(); - - //modification of 10/10/11 calculate structure each time -// if (col == null) { - AnalysisLogger.getLogger().trace("******************Order Category******************"); - dbo.buildCategoriesStructure(dbSession, config.getReferenceTable(), config.getReferenceColumn(), config.getIdColumn(), config.getNameHuman(), config.getDescription()); - col = TSObjectTransformer.transform2List(dbo, config, CategoryFilter); - AnalysisLogger.getLogger().trace("***************End Ordering********************"); - originalCol = col.generateNovelList(); - /* - } else { - col = originalCol.generateNovelList(); - } - */ - - AnalysisLogger.getLogger().warn("Starting Calculation...wait..."); - - long t0 = System.currentTimeMillis(); - - // processor.calcLike(col,seriesName, columnName); - - processor.calcLikeThread(col, seriesName, columnName, SingletonString); - - // perform processing until the table contains at least one element - ArrayList checkingResults = null; - - // if (oneshotMode) - // checkingResults = getClassification(); - // else - checkingResults = getClassification(); - - while ((checkingResults == null || checkingResults.size() == 0) && (triesCounter < maxTriesClassification)) { - AnalysisLogger.getLogger().warn("..another processing pass is required. 
Attempt number " + (triesCounter + 1)); - triesCounter++; - float differencialThr = config.getCategoryDiscardDifferencialThreshold(); - float acceptanceThr = config.getEntryAcceptanceThreshold(); - // reduce the thresholds of 10 points and recalculate - config.setCategoryDiscardDifferencialThreshold(Math.max(differencialThr - 20, 0)); - config.setEntryAcceptanceThreshold(Math.max(acceptanceThr - 20, 0)); - AnalysisLogger.getLogger().trace("Performing next processing pass"); - runGuesser(seriesName, columnName, null, CategoryFilter, ColumnFilter, SingletonString); - AnalysisLogger.getLogger().debug("End processing pass"); - - // if (oneshotMode) - // checkingResults = getClassification(); - // else - checkingResults = getClassification(); - - if (triesCounter == 0) - break; - } - - long t1 = System.currentTimeMillis() - t0; - - AnalysisLogger.getLogger().warn("...End Calculation in " + t1 + "ms"); - - triesCounter = 0; - // close session if not more necessary - if (oneshotMode) - dbSession.close(); - } - - public ArrayList getClassificationOLD() { - - ArrayList results = new ArrayList(); - int size = processor.bestCategories.size(); - for (int i = 0; i < size; i++) { - results.add(new SingleResult(processor.bestCategories.get(i), processor.bestColumns.get(i), processor.bestScores.get(i), null, "0")); - } - - return results; - } - - public ArrayList getDetailedMatches() { - - if (processor.getSingletonMatches() != null) { - - // use deviation to cut results - float threshold = config.getSingleEntryRecognitionMaxDeviation(); - ArrayList results = processor.getSingletonMatches(); - double minScore = 0; - // get the best result and calculate the threshold - if (results.size() > 0) { - minScore = results.get(0).getScore() - threshold; - } - - // remove poor objects - int size = results.size(); - for (int i = 0; i < size; i++) { - SingleResult sr = results.get(i); - if (sr.getScore() < minScore) { - results.remove(i); - i--; - size--; - } - } - - return 
processor.getSingletonMatches(); - } else - return new ArrayList(); - } - - public String getDetailedSingletonEntry() { - - if (processor.getSingletonElement() != null) { - return processor.getSingletonElement(); - } else - return ""; - } - - public ArrayList getClassificationPlain() { - - ArrayList results = new ArrayList(); - int size = processor.bestCategories.size(); - double maxscore = 0; - - for (int i = 0; i < size; i++) { - double score = processor.bestScores.get(i); - if (maxscore < score) { - maxscore = score; - } - } - - for (int i = 0; i < size; i++) { - - double score = processor.bestScores.get(i); - // normalizing percentages!!! - score = (score / (maxscore + ((size > 1) ? 1 : 0))) * 100; - - if (score > config.categoryDiscardDifferencialThreshold) { - - Reference ref = col.getCategory(processor.bestCategories.get(i)); - - results.add(new SingleResult(processor.bestCategories.get(i), processor.bestColumns.get(i), score, ref.getTableName(), ref.getIndex())); - } - } - - return results; - } - - public ArrayList getClassification() { - - ArrayList results = new ArrayList(); - int size = processor.bestCategories.size(); - double maxscore = 0; - - BigDecimal sumElements = BigDecimal.ZERO; - ArrayList subscores = new ArrayList(); - - // calculate sum of elements and weights; - for (int i = 0; i < size; i++) { - BigInteger catElements = col.getScoresTable().get(processor.bestCategories.get(i)).getCategoryElements(); - sumElements = sumElements.add(new BigDecimal(catElements)); - } -/* - if (sumElements.compareTo(BigDecimal.valueOf(10000)) < 0) - return getClassificationPlain(); -*/ - for (int i = 0; i < size; i++) { - double score = processor.bestScores.get(i); - // multiply for impotance - BigInteger catElements = col.getScoresTable().get(processor.bestCategories.get(i)).getCategoryElements(); - - // AnalysisLogger.getLogger().warn("\t elements "+catElements+" sum "+sumElements); - - double weight = new BigDecimal(catElements).divide(sumElements, 2, 
BigDecimal.ROUND_HALF_UP).doubleValue(); - - if (weight >= 3) - weight = 2 * Math.log(weight * 100) / 10f; - else if ((weight >= 0.5) && (weight <= 1)) - { - weight = Math.log(weight * 100) / 100.00f; - } - else if (weight < 0.05) - weight = 0.05; - - AnalysisLogger.getLogger().warn("WEIGHT FOR CATEGORY " + processor.bestCategories.get(i) + "-" + processor.bestColumns.get(i) + " : " + weight + " SCORE " + score); - - // recalculate weights - score = score * weight; - score = Math.min(1, score); - - if (maxscore < score) { - maxscore = score; - } - - subscores.add(score); - } - // AnalysisLogger.getLogger().warn("MAX SCORE "+maxscore); - - for (int i = 0; i < size; i++) { - - // double score = processor.bestScores.get(i); - double score = subscores.get(i); - - // AnalysisLogger.getLogger().warn("SCORE FOR CATEGORY "+processor.bestCategories.get(i)+" -COLUMN : "+processor.bestColumns.get(i)+" - "+score); - - // normalizing percentages!!! - score = (score / (maxscore + ((size > 1) ? 1 : 0))) * 100; - - // AnalysisLogger.getLogger().warn("SCORE FOR CATEGORY "+processor.bestCategories.get(i)+" -COLUMN : "+processor.bestColumns.get(i)+" - "+score); - if (score > config.categoryDiscardDifferencialThreshold) { - // AnalysisLogger.getLogger().warn("SCORE "+score); - // insert into the right place - int index = results.size(); - int j = 0; - for (SingleResult res : results) { - if (res.getScore() < score) { - index = j; - } - j++; - } - - Reference ref = col.getCategory(processor.bestCategories.get(i)); - SingleResult sr = new SingleResult(processor.bestCategories.get(i), processor.bestColumns.get(i), score, ref.getTableName(), ref.getIndex()); - //control for repetitions - if (isnotRepetition(sr, results)) - results.add(index, sr); - } - } - - //limit the result list after rescoring - int s = results.size(); - if (s>MAXRESULTS){ - int diff = (size-MAXRESULTS); - for (int i=0;i previous) { - - boolean notrepeated = true; - int size = previous.size(); - for (int i = 0; i < 
size; i++) { - SingleResult sr = previous.get(i); - if (sr.getCategory().equalsIgnoreCase(result.getCategory()) && sr.getColumn().equalsIgnoreCase(result.getColumn())) { - notrepeated = true; - break; - } - } - - return notrepeated; - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/run/StarGraphExtraction.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/run/StarGraphExtraction.java deleted file mode 100644 index df43655..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/run/StarGraphExtraction.java +++ /dev/null @@ -1,36 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.run; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.DBObjectTranslator; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.TSObjectTransformer; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.contentmanagement.lexicalmatcher.utils.DatabaseFactory; -import org.hibernate.SessionFactory; - -public class StarGraphExtraction { - - /** - * @param args - */ - public static void main(String[] args) { - try { - RunMain(); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - private final static String ConfigurationFileNameLocal = "hibernate.cfg.xml"; - - public static void RunMain() throws Exception{ - AnalysisLogger.setLogger("./ALog.properties"); - - //configurazione DB - inizializzo la sessione e mi connetto - SessionFactory dbSession = DatabaseFactory.initDBConnection(ConfigurationFileNameLocal); - DBObjectTranslator dbo = new DBObjectTranslator(); - dbo.buildWholeStructure(dbSession,null,null,null,null,null); - TSObjectTransformer.transform2Graph(dbo); - - } -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/TestExternalCfgProduction.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/TestExternalCfgProduction.java deleted file mode 100644 index 
e476f8f..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/TestExternalCfgProduction.java +++ /dev/null @@ -1,49 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class TestExternalCfgProduction { - - public static void main(String[] args) { - - try { - int attempts = 1; - - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - - String seriesName = "IMPORT_ecd2e3a0_ee90_11e0_be9e_90f3621758ee"; - String column = "field4"; - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - conf.setReferenceTable("codelist1733371938"); - conf.setReferenceColumn("ifield14"); - conf.setNameHuman("ifield1"); - conf.setIdColumn("ifield0"); - conf.setDescription("ifield2"); - - - //database Parameters - conf.setDatabaseUserName("gcube"); - conf.setDatabasePassword("d4science2"); - conf.setDatabaseDriver("org.postgresql.Driver"); - conf.setDatabaseURL("jdbc:postgresql://localhost/testdb"); - conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect"); - - guesser.runGuesser(seriesName, column, conf); - guesser.showResults(guesser.getClassification()); - - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/TestSingleExternalCfgProduction.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/TestSingleExternalCfgProduction.java deleted file mode 100644 index 19e55a5..0000000 --- 
a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/TestSingleExternalCfgProduction.java +++ /dev/null @@ -1,64 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test; - -import java.util.ArrayList; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class TestSingleExternalCfgProduction { - - public static void main(String[] args) { - - try { - - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String singleton = "Faroe Island"; - - String family = "COUNTRY_OLD"; - String column = "field6"; - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - conf.setReferenceTable("codelist1733371938"); - conf.setReferenceColumn("ifield14"); - conf.setNameHuman("ifield1"); - conf.setIdColumn("ifield0"); - conf.setDescription("ifield2"); - - //CHANGE THIS TO ENHANCE THE RECALL - conf.setEntryAcceptanceThreshold(30); - conf.setReferenceChunksToTake(-1); - conf.setTimeSeriesChunksToTake(-1); - conf.setUseSimpleDistance(false); - - //database Parameters - conf.setDatabaseUserName("gcube"); - conf.setDatabasePassword("d4science2"); -// conf.setDatabaseDriver("org.postgresql.Driver"); - conf.setDatabaseURL("jdbc:postgresql://localhost/testdb"); - conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect"); - - guesser.runGuesser(singleton, conf, family,column ); - - ArrayList detailedResults = guesser.getDetailedMatches(); - - AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton); - - CategoryGuesser.showResults(detailedResults); - - 
AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest1.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest1.java deleted file mode 100644 index 735864e..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest1.java +++ /dev/null @@ -1,58 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class BenchMarkTest1 { - - public static void main(String[] args) { - - try { - int attempts = 1; - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String seriesName = "import_bdefb470_5cea_11df_a0a6_909e7d074592"; - String column = "field1"; - String correctFamily = "country"; - String correctColumn = "name_en"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - //bench 2 - AnalysisLogger.getLogger().warn("----------------------BENCH 2-------------------------"); - seriesName = "import_bdefb470_5cea_11df_a0a6_909e7d074592"; - column = "field2"; - correctFamily = "area"; - correctColumn = "name_en"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 2-----------------------\n"); - - //bench 3 - AnalysisLogger.getLogger().warn("----------------------BENCH 3-------------------------"); - seriesName = 
"import_bdefb470_5cea_11df_a0a6_909e7d074592"; - column = "field4"; - correctFamily = "species"; - correctColumn = "scientific_name"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 3-----------------------\n"); - - //bench 4 - AnalysisLogger.getLogger().warn("----------------------BENCH 4-------------------------"); - seriesName = "import_bdefb470_5cea_11df_a0a6_909e7d074592"; - column = "field3"; - correctFamily = "species"; - correctColumn = "scientific_name"; -// CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 4-----------------------\n"); - - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest2.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest2.java deleted file mode 100644 index 352b4a2..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest2.java +++ /dev/null @@ -1,54 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class BenchMarkTest2 { - - public static void main(String[] args) { - - try { - int attempts = 1; - - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6"; - String column = "field1"; - String correctFamily = "SPECIES"; - String correctColumn = "SCIENTIFIC_NAME"; - CategoryGuesser.AccuracyCalc(guesser, configPath, 
seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - - - - //bench 2 - AnalysisLogger.getLogger().warn("----------------------BENCH 2-------------------------"); - seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6"; - column = "field2"; - correctFamily = "COUNTRY"; - correctColumn = "ISO_3_CODE"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 2-----------------------\n"); - - - //bench 4 - AnalysisLogger.getLogger().warn("----------------------BENCH 4-------------------------"); - seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6"; - column = "field3"; - correctFamily = "AREA"; - correctColumn = "NAME_EN"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 4-----------------------\n"); - - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest3.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest3.java deleted file mode 100644 index cfdd033..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest3.java +++ /dev/null @@ -1,31 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class BenchMarkTest3 { - - public static void main(String[] args) { - - try { - int attempts = 1; - - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - 
AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6"; - String column = "field1"; - String correctFamily = "SPECIES"; - String correctColumn = "SCIENTIFIC_NAME"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest4.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest4.java deleted file mode 100644 index a84356d..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest4.java +++ /dev/null @@ -1,31 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class BenchMarkTest4 { - - public static void main(String[] args) { - - try { - int attempts = 1; - - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String seriesName = "import_2c97f580_35a0_11df_b8b3_aa10916debe6"; - String column = "field3"; - String correctFamily = "AREA"; - String correctColumn = "NAME_EN"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest5.java 
b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest5.java deleted file mode 100644 index 55f954b..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTest5.java +++ /dev/null @@ -1,31 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class BenchMarkTest5 { - - public static void main(String[] args) { - - try { - int attempts = 1; - - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String seriesName = "import_532bba80_1c8f_11df_a4ee_87804054691e"; - String column = "field2"; - String correctFamily = "ISSCAAP GROUP"; - String correctColumn = "NAME_EN"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTestExternalCfg.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTestExternalCfg.java deleted file mode 100644 index 5b3bf5e..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTestExternalCfg.java +++ /dev/null @@ -1,52 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class BenchMarkTestExternalCfg { - - public 
static void main(String[] args) { - - try { - int attempts = 1; - - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String seriesName = "import_532bba80_1c8f_11df_a4ee_87804054691e"; - String column = "field2"; - String correctFamily = "ISSCAAP GROUP"; - String correctColumn = "NAME_EN"; - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - conf.setCategoryDiscardDifferencialThreshold(5); - conf.setCategoryDiscardThreshold(0); - conf.setChunkSize(25); - conf.setEntryAcceptanceThreshold(50); - conf.setNumberOfThreadsToUse(2); - conf.setRandomTake(true); - conf.setReferenceChunksToTake(20); - conf.setTimeSeriesChunksToTake(1); - conf.setUseSimpleDistance(false); - - //database Parameters - conf.setDatabaseUserName("root"); -// conf.setDatabasePassword("password"); - conf.setDatabaseDriver("com.mysql.jdbc.Driver"); - conf.setDatabaseURL("jdbc:mysql://localhost/timeseries"); - conf.setDatabaseDialect("org.hibernate.dialect.MySQLDialect"); - conf.setDatabaseAutomaticTestTable("connectiontesttable"); - conf.setDatabaseIdleConnectionTestPeriod("3600"); - - CategoryGuesser.AccuracyCalc(conf,guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTestFilterCategory.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTestFilterCategory.java deleted file mode 100644 index 6a76403..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTestFilterCategory.java +++ /dev/null @@ -1,38 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import 
java.util.ArrayList; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - - -public class BenchMarkTestFilterCategory { - - public static void main(String[] args) { - - try { - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String seriesName = "ref_order"; - String column = "scientific_name"; - String correctFamily = "order"; - String correctColumn = "scientific_name"; - - guesser.runGuesser(seriesName, column, null, correctFamily, correctColumn); - ArrayList results = guesser.getClassification(); - - CategoryGuesser.showResults(results); - - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTestSingleton.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTestSingleton.java deleted file mode 100644 index b5bf8ce..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTestSingleton.java +++ /dev/null @@ -1,51 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import java.util.ArrayList; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - - -public class BenchMarkTestSingleton { - - public static void main(String[] args) { - - try { - - String configPath = "."; - 
CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String singleton = "sarda sarda"; -// String singleton = "Mitella pollicipes"; -// String singleton = "policipes"; -// String singleton = ""; - String family = "catalog life"; - String column = "scientific_name"; - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - - //CHANGE THIS TO ENHANCE THE RECALL - conf.setEntryAcceptanceThreshold(30); - conf.setReferenceChunksToTake(-1); - conf.setTimeSeriesChunksToTake(-1); - conf.setUseSimpleDistance(false); - - guesser.runGuesser(singleton, conf, family,column ); - ArrayList detailedResults = guesser.getDetailedMatches(); - - AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton); - - CategoryGuesser.showResults(detailedResults); - - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTestTSCountry.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTestTSCountry.java deleted file mode 100644 index 70be422..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTestTSCountry.java +++ /dev/null @@ -1,31 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class BenchMarkTestTSCountry { - - public static void main(String[] args) { - - try { - int attempts = 1; - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String seriesName = 
"import_bdefb470_5cea_11df_a0a6_909e7d074592"; - String column = "field1"; - String correctFamily = "country"; - String correctColumn = "name_en"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTrainingSet.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTrainingSet.java deleted file mode 100644 index 4616735..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTrainingSet.java +++ /dev/null @@ -1,88 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class BenchMarkTrainingSet { - - -public static void main(String[] args) { - - try { - String configPath ="."; - int attempts = 1; - CategoryGuesser guesser = new CategoryGuesser(configPath); - - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String seriesName = "ref_commission"; - String column = "name_en"; - String correctFamily = "commission"; - String correctColumn = "name_en"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - - AnalysisLogger.getLogger().warn("----------------------BENCH 2-------------------------"); - seriesName = "ref_species"; - column = "scientific_name"; - correctFamily = "species"; - correctColumn = "scientific_name"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, 
correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 2-----------------------\n"); - - - AnalysisLogger.getLogger().warn("----------------------BENCH 3-------------------------"); - seriesName = "ref_area"; - column = "name_en"; - correctFamily = "area"; - correctColumn = "name_en"; -// CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 3-----------------------\n"); - - - AnalysisLogger.getLogger().warn("----------------------BENCH 4-------------------------"); - seriesName = "ref_ocean"; - column = "name_en"; - correctFamily = "ocean"; - correctColumn = "name_en"; -// CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 4-----------------------\n"); - - - AnalysisLogger.getLogger().warn("----------------------BENCH 5-------------------------"); - seriesName = "ref_geo_region"; - column = "name_en"; - correctFamily = "geo region"; - correctColumn = "name_en"; -// CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 5-----------------------\n"); - - - AnalysisLogger.getLogger().warn("----------------------BENCH 6-------------------------"); - seriesName = "ref_fa_region"; - column = "name_en"; - correctFamily = "fa region"; - correctColumn = "name_en"; -// CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 6-----------------------\n"); - - - AnalysisLogger.getLogger().warn("----------------------BENCH 7-------------------------"); - seriesName = "ref_order"; - column = "scientific_name"; - correctFamily = "order"; - correctColumn = 
"scientific_name"; -// CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 7-----------------------\n"); - - - - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTrainingSetScientificName.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTrainingSetScientificName.java deleted file mode 100644 index e8a0694..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/BenchMarkTrainingSetScientificName.java +++ /dev/null @@ -1,33 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class BenchMarkTrainingSetScientificName { - - -public static void main(String[] args) { - - try { - String configPath ="."; - int attempts = 1; - CategoryGuesser guesser = new CategoryGuesser(configPath); - - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String seriesName = "ref_species"; - String column = "scientific_name"; - String correctFamily = "species"; - String correctColumn = "scientific_name"; - CategoryGuesser.AccuracyCalc(guesser, configPath, seriesName, column, attempts, correctFamily, correctColumn); - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/TestExternalCfgProduction.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/TestExternalCfgProduction.java deleted file mode 100644 index 23106a8..0000000 --- 
a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/TestExternalCfgProduction.java +++ /dev/null @@ -1,64 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class TestExternalCfgProduction { - - public static void main(String[] args) { - - try { - int attempts = 1; - - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); -// String seriesName = "rdmc366dfe0ddf511e086b1b1c5d6fb1c27"; - String seriesName = "IMPORT_ecd2e3a0_ee90_11e0_be9e_90f3621758ee"; - - String column = "field4"; - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - /* - conf.setCategoryDiscardDifferencialThreshold(5); - conf.setCategoryDiscardThreshold(0); - conf.setChunkSize(25); - conf.setEntryAcceptanceThreshold(50); - conf.setNumberOfThreadsToUse(2); - conf.setRandomTake(true); - conf.setReferenceChunksToTake(20); - conf.setTimeSeriesChunksToTake(1); - conf.setUseSimpleDistance(false); - */ - - //database Parameters - conf.setDatabaseUserName("utente"); - conf.setDatabasePassword("d4science"); -// conf.setDatabaseDriver("org.postgresql.Driver"); - conf.setDatabaseURL("jdbc:postgresql://dbtest.next.research-infrastructures.eu/timeseries"); - conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect"); - conf.setDatabaseAutomaticTestTable("connectiontesttable"); - conf.setDatabaseIdleConnectionTestPeriod("3600"); - conf.setReferenceTable("codelist1733371938"); - conf.setReferenceColumn("ifield14"); - conf.setNameHuman("ifield1"); - conf.setIdColumn("ifield0"); - conf.setDescription("ifield2"); - guesser.runGuesser(seriesName, column, 
conf); - guesser.showResults(guesser.getClassification()); -// AnalysisLogger.getLogger().warn(); - - - - - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/TestSingleExternalCfgProduction.java b/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/TestSingleExternalCfgProduction.java deleted file mode 100644 index 375f4af..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/analysis/test/old/TestSingleExternalCfgProduction.java +++ /dev/null @@ -1,71 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.analysis.test.old; - -import java.util.ArrayList; - -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.analysis.guesser.data.SingleResult; -import org.gcube.contentmanagement.lexicalmatcher.analysis.run.CategoryGuesser; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class TestSingleExternalCfgProduction { - - public static void main(String[] args) { - - try { - - - String configPath = "."; - CategoryGuesser guesser = new CategoryGuesser(configPath); - //bench 1 - AnalysisLogger.getLogger().warn("----------------------BENCH 1-------------------------"); - String singleton = "Faroe Island"; -// String singleton = "Mitella pollicipes"; -// String singleton = "policipes"; -// String singleton = ""; -// String family = "rdf0a7fb500dd3d11e0b8d1d1e2e7ba4f9d"; - - String family = "COUNTRY_OLD"; - String column = "field6"; - - LexicalEngineConfiguration conf = new LexicalEngineConfiguration(); - - //CHANGE THIS TO ENHANCE THE RECALL - conf.setEntryAcceptanceThreshold(30); - conf.setReferenceChunksToTake(-1); - conf.setTimeSeriesChunksToTake(-1); - conf.setUseSimpleDistance(false); - //database Parameters - 
conf.setDatabaseUserName("utente"); - conf.setDatabasePassword("d4science"); -// conf.setDatabaseDriver("org.postgresql.Driver"); - conf.setDatabaseURL("jdbc:postgresql://dbtest.next.research-infrastructures.eu/timeseries"); - conf.setDatabaseDialect("org.hibernate.dialect.PostgreSQLDialect"); - conf.setDatabaseAutomaticTestTable("connectiontesttable"); - conf.setDatabaseIdleConnectionTestPeriod("3600"); - - conf.setReferenceTable("codelist1733371938"); - conf.setReferenceColumn("ifield14"); - conf.setNameHuman("ifield1"); - conf.setIdColumn("ifield0"); - conf.setDescription("ifield2"); - - guesser.initSingleMatcher(conf,column ); - - guesser.runGuesser(singleton, null, family,column ); - - ArrayList detailedResults = guesser.getDetailedMatches(); - - AnalysisLogger.getLogger().warn("Detailed Match on Name :"+singleton); - - CategoryGuesser.showResults(detailedResults); - - AnalysisLogger.getLogger().warn("--------------------END BENCH 1-----------------------\n"); - - - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/utils/AnalysisLogger.java b/src/org/gcube/contentmanagement/lexicalmatcher/utils/AnalysisLogger.java deleted file mode 100644 index e6abc67..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/utils/AnalysisLogger.java +++ /dev/null @@ -1,37 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.utils; - -import org.apache.log4j.Logger; -import org.apache.log4j.PropertyConfigurator; - -public class AnalysisLogger { - - - private static Logger logger; - private static Logger hibernateLogger; - - public static Logger getLogger(){ - - if (logger == null){ - setLogger("./ALog.properties"); - logger = Logger.getLogger("AnalysisLogger"); - } - - return logger; - } - //in ingresso vuole il path al file di config del log4j - public static void setLogger(String path){ - if (logger == null){ - PropertyConfigurator.configure(path); - } - logger = 
Logger.getLogger("AnalysisLogger"); - hibernateLogger = Logger.getLogger("hibernate"); - } - - public static void printStackTrace(Exception e){ - - int numberoflines = e.getStackTrace().length; - for (int i=0;i nodes = document.selectNodes("//hibernate-configuration/session-factory/property"); - Iterator nodesIterator = nodes.iterator(); - -// System.out.println("--- DATABASE Configuration --- "); - - while (nodesIterator.hasNext()) { - Node currentnode = nodesIterator.next(); - String element = currentnode.valueOf("@name"); - if (element.equals("connection.driver_class")) - if (config.getDatabaseDriver() != null){ - currentnode.setText(config.getDatabaseDriver()); - } - if (element.equals("connection.url")) { - if (config.getDatabaseURL() != null) - currentnode.setText(config.getDatabaseURL()); - } - if (element.equals("connection.username")) { - if (config.getDatabaseUserName() != null) - currentnode.setText(config.getDatabaseUserName()); - } - if (element.equals("connection.password")) { - if (config.getDatabasePassword() != null) - currentnode.setText(config.getDatabasePassword()); - } - if (element.equals("dialect")) { - AnalysisLogger.getLogger().trace("Dialect -> "+config.getDatabaseDialect()); - if (config.getDatabaseDialect() != null) - currentnode.setText(config.getDatabaseDialect()); - } - if (element.equals("c3p0.idleConnectionTestPeriod")) { - if (config.getDatabaseIdleConnectionTestPeriod() != null) - currentnode.setText(config.getDatabaseIdleConnectionTestPeriod()); - } - if (element.equals("c3p0.automaticTestTable")) { - if (config.getDatabaseAutomaticTestTable() != null) - currentnode.setText(config.getDatabaseAutomaticTestTable()); - } - } - - Configuration cfg = new Configuration(); - cfg = cfg.configure(DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new ByteArrayInputStream(document.asXML().getBytes()))); - cfg.setProperty("hibernate.hbm2ddl.auto", "create"); - - SessionFactory DBSessionFactory = null; - DBSessionFactory = 
cfg.buildSessionFactory(); - - // close stream - stream.close(); - - - - - return DBSessionFactory; - } - - @SuppressWarnings({"unchecked"}) - public static List executeHQLQuery(String query, SessionFactory DBSessionFactory, boolean useSQL) { - - List obj = null; - Session ss = null; - try { - ss = DBSessionFactory.getCurrentSession(); - - ss.beginTransaction(); - - Query qr = null; - - if (useSQL) - qr = ss.createSQLQuery(query); - else - qr = ss.createQuery(query); - - List result = qr.list(); - - ss.getTransaction().commit(); - - /* - if (result == null) - System.out.println("Hibernate doesn't return a valid object when org.gcube.contentmanagement.lexicalmatcher retrieve UserState Object"); - - if (result != null && result.size() == 0) - System.out.println(String.format("found nothing in database")); -*/ - if (result != null && result.size() != 0) { - obj = result; - } - - } catch (Exception e) { - -// System.out.println(String.format("Error while executing query: %1$s %2$s", query, e.getMessage())); - e.printStackTrace(); - rollback(ss); - } - - return obj; - - } - - public static void executeHQLUpdate(String query, SessionFactory DBSessionFactory, boolean useSQL) { -// System.out.println("executing query: " + query); - Session ss = null; - - try { - - ss = DBSessionFactory.getCurrentSession(); -// System.out.println("executing query"); - ss.beginTransaction(); - Query qr = null; - - if (useSQL) - qr = ss.createSQLQuery(query); - else - qr = ss.createQuery(query); - - qr.executeUpdate(); - ss.getTransaction().commit(); - - } catch (Exception e) { - rollback(ss); - e.printStackTrace(); - } - } - - public static void executeSQLUpdate(String query, SessionFactory DBSessionFactory) { - executeHQLUpdate(query, DBSessionFactory, true); - } - - public static List executeSQLQuery(String query, SessionFactory DBSessionFactory) { - return executeHQLQuery(query, DBSessionFactory, true); - } - - public static void rollback(Session ss) { - - try { - if (ss != null && 
ss.getTransaction() != null) - ss.getTransaction().rollback(); - } catch (Exception ex) { - - } finally { - try { - ss.close(); - } catch (Exception ee) { - } - } - } - - public static void saveObject(Object obj, SessionFactory DBSessionFactory) throws Exception { - if (DBSessionFactory != null) { - Session ss = null; - try { - ss = DBSessionFactory.getCurrentSession(); - ss.beginTransaction(); - ss.saveOrUpdate(obj); - ss.getTransaction().commit(); - } catch (Exception e) { - rollback(ss); - throw e; - } - } - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/utils/DistanceCalculator.java b/src/org/gcube/contentmanagement/lexicalmatcher/utils/DistanceCalculator.java deleted file mode 100644 index d5d2c1c..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/utils/DistanceCalculator.java +++ /dev/null @@ -1,189 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.utils; - -public class DistanceCalculator { - - // **************************** - // Get minimum of three values - // **************************** - - private int Minimum(int a, int b, int c) { - int mi; - - mi = a; - if (b < mi) { - mi = b; - } - if (c < mi) { - mi = c; - } - return mi; - - } - - // ***************************** - // Compute Levenshtein distance - // ***************************** - - public int LD(String s, String t) { - int d[][]; // matrix - int n; // length of s - int m; // length of t - int i; // iterates through s - int j; // iterates through t - char s_i; // ith character of s - char t_j; // jth character of t - int cost; // cost - - // Step 1 - - n = s.length(); - m = t.length(); - if (n == 0) { - return m; - } - if (m == 0) { - return n; - } - d = new int[n + 1][m + 1]; - - // Step 2 - - for (i = 0; i <= n; i++) { - d[i][0] = i; - } - - for (j = 0; j <= m; j++) { - d[0][j] = j; - } - - // Step 3 - - for (i = 1; i <= n; i++) { - - s_i = s.charAt(i - 1); - - // Step 4 - - for (j = 1; j <= m; j++) { - - t_j = t.charAt(j - 1); - - // Step 5 - - if (s_i 
== t_j) { - cost = 0; - } else { - cost = 1; - } - - // Step 6 - - d[i][j] = Minimum(d[i - 1][j] + 1, d[i][j - 1] + 1, d[i - 1][j - 1] + cost); - - } - - } - - // Step 7 - - return d[n][m]; - - } - - // ***************************** - // Calculate Complete Distance - // ***************************** - public double CD(boolean useSimpleDistance, String h, String t) { - return CD(useSimpleDistance, h, t,false,false); - } - //output will be a percentage. 1 will mean a complete agreement between the inputs - public double CD(boolean useSimpleDistance, String h, String t, boolean ignoreCase, boolean boostMatch) { - - - - double distance = 0; - if ((h == null) && (t == null)) { - distance = 1; - } - else if ((h != null) && (t != null)) { - - h = treatString(h,ignoreCase); - t = treatString(t,ignoreCase); - int lt = t.length(); - int lh = h.length(); - double matchFactor = 1.5f; - if (boostMatch) - matchFactor = 2f; - - if (((lt==0)&&(lh!=0))||((lt!=0)&&(lh==0))) - distance = 0; - else if (h.equalsIgnoreCase(t)){ - distance = 1; - } - else if (useSimpleDistance) { - distance = 0; - } - else if (t.contains(h)) { - // calcolo la percentuale di contenimento - String treatedT = t.replace(h, ""); - double percentage = 1 - ((double) treatedT.length() / (double) lt); -// AnalysisLogger.getLogger().debug("Complete Distance Calculation: coverage percentage of h on t " + percentage); -// double percentage = 0.9; - percentage = Math.min(percentage * matchFactor,0.98); - distance = percentage; - } - else if (h.contains(t)) { - // calcolo la percentuale di contenimento - String treatedH = h.replace(t, ""); - double percentage = 1 - ((double) treatedH.length() / (double) lh); -// AnalysisLogger.getLogger().debug("Complete Distance Calculation: coverage percentage of t on h " + percentage); -// double percentage = 0.9; - percentage = Math.min(percentage * matchFactor,0.98); - distance = percentage; - } - else { - /* - if ((lh>lt)||((lt>lh*1.5))){ - System.out.println("UNMATCHABLE "+lt 
+" vs "+lh); - distance = 0; - } - else{ - */ - //calcolo percentuale su Levenshtein distance - int levenDist = LD(h, t); - int maxlen = Math.max(lh, lt); - distance = 1-((double)levenDist / (double)maxlen); -// System.out.println("L " + levenDist+" max "+maxlen+" h "+h+" t "+t); -// AnalysisLogger.getLogger().debug("Complete Distance Calculation: leven distance percentage of h on t " + distance); -// } - } - } - - return distance; - } - - private String treatString(String h, boolean ignoreCase){ - //tolgo la punteggiatura - h = h.replaceAll("[!\"#$%&'()*+,./:;<=>?@\\^_`{|}~-]", ""); - //riduco gli spazi multipli a spazi singoli - h = h.replaceAll("[ ]+", " "); - //trim - h = h.trim(); - if (ignoreCase) - h = h.toLowerCase(); - - return h; - } - - - public static void main(String[] args) { - - String h = "Mediteranean"; - String t = "Mediterranean horse mackerel"; - DistanceCalculator d = new DistanceCalculator(); - double cd = d.CD(false,h, t, true , true); - System.out.println("Distance between "+h+" and "+t+" : " + cd); - - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/utils/FileTools.java b/src/org/gcube/contentmanagement/lexicalmatcher/utils/FileTools.java deleted file mode 100644 index 8487309..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/utils/FileTools.java +++ /dev/null @@ -1,89 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.utils; - -import java.io.BufferedReader; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.OutputStreamWriter; -import java.io.UnsupportedEncodingException; -import java.io.Writer; - -import org.dom4j.Document; -import org.dom4j.io.SAXReader; - -public class FileTools { - - public static String readXMLDoc(String xmlFilePath) throws Exception { - String xml = null; - - File fl = new File(xmlFilePath); - FileInputStream stream = new 
FileInputStream(fl); - SAXReader saxReader = new SAXReader(); - Document document = saxReader.read(stream); - xml = document.asXML(); - return xml; - } - - public static void saveString2File(String filename, String string2save) throws Exception { - - } - - public static boolean checkInput(String filename) { - File file = new File(filename); - if (!file.exists()) - return false; - if (!file.canRead()) - return false; - else - return true; - } - - public static boolean checkOutput(String filename, boolean overwrite) { - File file = new File(filename); - if (!overwrite && file.exists()) - return false; - if (file.exists() && (file.isDirectory() || !file.canWrite())) - return false; - else - return true; - } - - public static String loadString(String filename, String encoding) throws Exception { - try { - if (checkInput(filename)) { - - BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(filename), encoding)); - String line = null; - StringBuilder vud = new StringBuilder(); - - while ((line = in.readLine()) != null) { - vud.append(line + "\n"); - } - in.close(); - return vud.toString(); - } else - return null; - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - throw new Exception("The file " + filename + " is not in the correct format!"); - } catch (IOException e) { - throw new Exception("The file " + filename + " is not in the correct format!"); - } - } - - public static void saveString(String filename, String s, boolean overwrite, String encoding) throws Exception { - try { - if (checkOutput(filename, overwrite)) { - Writer out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filename), encoding)); - out.write(s); - out.close(); - } - } catch (IOException e) { - throw new Exception("The system can not write in " + filename + " because:\n" + e.getMessage()); - } - } - -} diff --git a/src/org/gcube/contentmanagement/lexicalmatcher/utils/MathFunctions.java 
b/src/org/gcube/contentmanagement/lexicalmatcher/utils/MathFunctions.java deleted file mode 100644 index 0015b0e..0000000 --- a/src/org/gcube/contentmanagement/lexicalmatcher/utils/MathFunctions.java +++ /dev/null @@ -1,111 +0,0 @@ -package org.gcube.contentmanagement.lexicalmatcher.utils; - -import java.math.BigInteger; -import java.util.ArrayList; - -public class MathFunctions { - - /** - * @param args - */ - public static void main(String[] args) { - // TODO Auto-generated method stub - - } - - //increments a percentage o mean calculation when a lot of elements are present - public static float incrementPerc(float perc, float quantity, int N){ - - if (N==0) - return quantity; - - float out = 0; - int N_plus_1 = N+1; - out = (float)((perc + ((double)quantity / (double)N )) * ((double)N/(double)N_plus_1)); - return out; - - } - - //increments a percentage o mean calculation when a lot of elements are present - public static double incrementAvg(double perc, double quantity, int N){ - - if (N==0) - return quantity; - - double out = 0; - int N_plus_1 = N+1; - out = (double)((perc + ((double)quantity / (double)N )) * ((double)N/(double)N_plus_1)); - return out; - - } - - public static ArrayList generateRandoms(int numberOfRandoms, int min, int max) { - - ArrayList randomsSet = new ArrayList(); - // if number of randoms is equal to -1 generate all numbers - if (numberOfRandoms == -1) { - for (int i = min; i < max; i++) { - randomsSet.add(i); - } - } else { - int numofrandstogenerate = 0; - if (numberOfRandoms <= max) { - numofrandstogenerate = numberOfRandoms; - } else { - numofrandstogenerate = max; - } - - if (numofrandstogenerate == 0) { - randomsSet.add(0); - } else { - for (int i = 0; i < numofrandstogenerate; i++) { - - int RNum = -1; - RNum = (int) ((max) * Math.random()) + min; - - // generate random number - while (randomsSet.contains(RNum)) { - RNum = (int) ((max) * Math.random()) + min; - // AnalysisLogger.getLogger().debug("generated " + RNum); - } - - // 
AnalysisLogger.getLogger().debug("generated " + RNum); - - if (RNum >= 0) - randomsSet.add(RNum); - } - - } - } - - AnalysisLogger.getLogger().trace("MathFunctions-> generateRandoms " + randomsSet.toString()); - - return randomsSet; - } - - - public static int[] generateSequence(int elements) { - int [] sequence = new int[elements]; - for (int i=0;i featuresTable; - private List preprocessedTables; - private List endpoints; - - //service and remote - private String remoteCalculatorEndpoint; - private String serviceUserName; - private String remoteEnvironment; - private Integer numberOfResources; - - //modeling - private String model; - private String generator; - private String gcubeScope; - - //other properties - private HashMap generalProperties; - - public String getParam(String key){ - if (generalProperties != null) - return generalProperties.get(key); - else return null; - } - - public void setParam(String key,String value){ - if (generalProperties == null) - generalProperties = new HashMap(); - - generalProperties.put(key,value); - } - - - public void setConfigPath(String configPath) { - if (!configPath.endsWith("/")) - configPath+="/"; - this.configPath = configPath; - } - - public String getConfigPath() { - return configPath; - } - - public void setNumberOfResources(Integer numberOfThreads) { - this.numberOfResources = numberOfThreads; - } - - public Integer getNumberOfResources() { - return numberOfResources; - } - - public void addGeneralProperties(HashMap generalProperties) { - for (String key:generalProperties.keySet()) { - this.generalProperties.put(key,generalProperties.get(key)); - } - } - - public void setGeneralProperties(HashMap generalProperties) { - this.generalProperties = generalProperties; - } - - public HashMap getGeneralProperties() { - return generalProperties; - } - - public String getModel() { - return model; - } - - public void setModel(String model) { - this.model = model; - } - - public String getPersistencePath() { - return 
persistencePath; - } - - public void setPersistencePath(String persistencePath) { - this.persistencePath = persistencePath; - } - - //the agent is the processor running an algorithm, or a modeler or an evaluator of performances - //it has been distinguished from the Model variable in order to separate the meta-processor from the underlying processor - public String getAgent() { - return generator; - } - //the agent is the processor running an algorithm, or a modeler or an evaluator of performances - //it has been distinguished from the Model variable in order to separate the meta-processor from the underlying processor - public void setAgent(String generator) { - this.generator = generator; - } - - public static SessionFactory getConnectionFromConfig(AlgorithmConfiguration Input){ - // init the database - String defaultDatabaseFile = Input.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile; - - Input.setDatabaseDriver(Input.getParam("DatabaseDriver")); - Input.setDatabaseUserName(Input.getParam("DatabaseUserName")); - Input.setDatabasePassword(Input.getParam("DatabasePassword")); - Input.setDatabaseURL(Input.getParam("DatabaseURL")); - SessionFactory connection = null; - try { - connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, Input); - } catch (Exception e) { - e.printStackTrace(); - AnalysisLogger.getLogger().trace("ERROR initializing connection"); - } - return connection; - } - - - public void initRapidMiner(){ - System.setProperty("rapidminer.init.operators", configPath+ AlgorithmConfiguration.RapidMinerOperatorsFile); - RapidMiner.init(); - AnalysisLogger.getLogger().info("Rapid Miner initialized"); - } - - public String getGcubeScope() { - return gcubeScope; - } - - public void setGcubeScope(String gcubeScope) { - this.gcubeScope = gcubeScope; - } - - public List getEndpoints() { - return endpoints; - } - - public void setEndpoints(List endpoints) { - this.endpoints = endpoints; - } - - - -} diff --git 
a/src/org/gcube/dataanalysis/ecoengine/configuration/INFRASTRUCTURE.java b/src/org/gcube/dataanalysis/ecoengine/configuration/INFRASTRUCTURE.java deleted file mode 100644 index 92cb0d8..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/configuration/INFRASTRUCTURE.java +++ /dev/null @@ -1,9 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.configuration; - -public enum INFRASTRUCTURE { - - RAINY_CLOUD, - D4SCIENCE, - LOCAL - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/connectors/RemoteGenerationManager.java b/src/org/gcube/dataanalysis/ecoengine/connectors/RemoteGenerationManager.java deleted file mode 100644 index 2e7a9bc..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/connectors/RemoteGenerationManager.java +++ /dev/null @@ -1,71 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.connectors; - -import org.gcube.contentmanagement.graphtools.utils.HttpRequest; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; - -public class RemoteGenerationManager { - - private final String submissionMethod = "submit"; - private final String statusMethod = "status/"; - private String submissionID; - private String username; - private String endpoint; - - public RemoteGenerationManager(String generatorEndPoint){ - if (generatorEndPoint.charAt(generatorEndPoint.length()-1)=='/') - endpoint = generatorEndPoint; - else - endpoint = generatorEndPoint+"/"; - } - - public void submitJob(RemoteHspecInputObject rhio) throws Exception{ - - AnalysisLogger.getLogger().warn("RemoteGenerationManager: retrieving job information"); - RemoteHspecOutputObject rhoo = null; - username = rhio.userName; - try{ - rhoo = (RemoteHspecOutputObject)HttpRequest.postJSonData(endpoint+submissionMethod, rhio, RemoteHspecOutputObject.class); - AnalysisLogger.getLogger().trace("RemoteGenerationManager: job information retrieved"); - }catch(Exception e){ - e.printStackTrace(); - AnalysisLogger.getLogger().trace("RemoteGenerationManager: ERROR - job information NOT retrieved"); - 
throw e; - } - if ((rhoo!=null) && (rhoo.id!=null)){ - AnalysisLogger.getLogger().warn("RemoteGenerationManager: job ID retrieved "); - submissionID = rhoo.id; - } - else{ - AnalysisLogger.getLogger().warn("RemoteGenerationManager: ERROR - job ID NOT retrieved "+rhoo.error); - throw new Exception(); - } - } - - public double retrieveCompletion(){ - RemoteHspecOutputObject rhoo = retrieveCompleteStatus(); - - try{ - double completion = Double.parseDouble(rhoo.completion); - return completion; - }catch(Exception e){ - e.printStackTrace(); - AnalysisLogger.getLogger().warn("RemoteGenerationManager: ERROR - cannot retrieve information from remote site ",e); - } - return 0; - } - - public RemoteHspecOutputObject retrieveCompleteStatus(){ - RemoteHspecOutputObject rhoo = null; - - try{ - rhoo = (RemoteHspecOutputObject)HttpRequest.getJSonData(endpoint+statusMethod+submissionID, null ,RemoteHspecOutputObject.class); - }catch(Exception e){ - e.printStackTrace(); - AnalysisLogger.getLogger().warn("RemoteGenerationManager: ERROR - cannot retrieve information from remote site ",e); - } - - return rhoo; - } - - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/connectors/RemoteHspecInputObject.java b/src/org/gcube/dataanalysis/ecoengine/connectors/RemoteHspecInputObject.java deleted file mode 100644 index 263c21e..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/connectors/RemoteHspecInputObject.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.connectors; - -import java.util.HashMap; -import java.util.List; - -public class RemoteHspecInputObject { - - public String userName; - public int nWorkers; - public String id; - public String generativeModel; - public String environment; - public List speciesList; - public Table hspenTableName; - public Table hcafTableName; - public Table hspecDestinationTableName; - public Table occurrenceCellsTable; - public boolean is2050; - public boolean isNativeGeneration; - public HashMap configuration; - - 
public RemoteHspecInputObject() { - hspenTableName=new Table(); - hcafTableName=new Table(); - hspecDestinationTableName=new Table(); - occurrenceCellsTable=new Table(); - } - public class Table{ - public String jdbcUrl; - public String tableName; - } -} diff --git a/src/org/gcube/dataanalysis/ecoengine/connectors/RemoteHspecOutputObject.java b/src/org/gcube/dataanalysis/ecoengine/connectors/RemoteHspecOutputObject.java deleted file mode 100644 index c414eb8..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/connectors/RemoteHspecOutputObject.java +++ /dev/null @@ -1,40 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.connectors; - -import java.util.List; - -import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources; -import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.SingleResource; - -public class RemoteHspecOutputObject { - public String id; - public String status; - public String completion; - public Metric metrics; - public String error; - - - -// "load":[{"resId":"W1","value":51.5},{"resId":"W2","value":23.4}],"throughput":[1307977348021,16490000]} - - public class Metric{ - public long timestamp; - public double activityvalue; - public int processedspecies; - public Resources resources; - public List load; - public List throughput; - - public Metric(){ - resources = new Resources(); - } - public String toString(){ - return timestamp+""+activityvalue+""+resources; - } - - - } - - public String toString(){ - return id+";"+status+";"+completion+";"+metrics+";"+error+";"; - } -} diff --git a/src/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/ResourceLoad.java b/src/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/ResourceLoad.java deleted file mode 100644 index a0d3464..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/ResourceLoad.java +++ /dev/null @@ -1,17 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.connectors.livemonitor; - -public class ResourceLoad { - public long timestamp; - public 
double value; - - - public ResourceLoad (long time,double val){ - timestamp = time; - value = val; - } - public String toString(){ - return "["+timestamp+", "+value+"]"; - } - - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/Resources.java b/src/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/Resources.java deleted file mode 100644 index 85d2830..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/Resources.java +++ /dev/null @@ -1,20 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.connectors.livemonitor; - -import java.util.ArrayList; -import java.util.List; - -public class Resources { - - public List list; - - public Resources(){ - list = new ArrayList(); - } - - public void addResource(String resID, double value){ - - list.add(new SingleResource(resID, value)); - - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/SingleResource.java b/src/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/SingleResource.java deleted file mode 100644 index f12cc67..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/connectors/livemonitor/SingleResource.java +++ /dev/null @@ -1,12 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.connectors.livemonitor; - -public class SingleResource { - - public String resId; - public double value; - public SingleResource(String resid, double val){ - resId = resid; - value = val; - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/evaluation/DiscrepancyAnalysis.java b/src/org/gcube/dataanalysis/ecoengine/evaluation/DiscrepancyAnalysis.java deleted file mode 100644 index 91ffd55..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/evaluation/DiscrepancyAnalysis.java +++ /dev/null @@ -1,211 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.evaluation; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; - -import org.gcube.contentmanagement.graphtools.utils.MathFunctions; -import 
org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis; -import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; -import org.gcube.dataanalysis.ecoengine.utils.Operations; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; -import org.hibernate.SessionFactory; - -public class DiscrepancyAnalysis extends DataAnalysis { - -// static String discrepancyQuery = "select distinct a.%1$s as csquareone,b.%2$s as csquaretwo,a.%3$s as firstprob,b.%4$s as secondprob from %5$s as a inner join %6$s as b on a.%1$s=b.%2$s and (a.%3$s<>b.%4$s)"; -// static String discrepancyQuery = "select distinct a.%1$s as csquareone,b.%2$s as csquaretwo,a.%3$s as firstprob,b.%4$s as secondprob from (select * from %5$s order by %1$s limit %7$s) as a inner join (select * from %6$s order by %2$s limit %7$s) as b on a.%1$s=b.%2$s and (a.%3$s<>b.%4$s)"; - static String discrepancyQuery = "select distinct a.%1$s as csquareone,b.%2$s as csquaretwo,a.%3$s as firstprob,b.%4$s as secondprob from " + - "(select csquarecode,sum(%3$s) as %3$s from (select * from %5$s order by %1$s limit %7$s) as aa group by %1$s) as a " + - "inner join " + - "(select csquarecode,sum(%4$s) as %4$s from (select * from %6$s order by %2$s limit %7$s) as aa group by %2$s) as b " + - "on a.%1$s=b.%2$s and (a.%3$s<>b.%4$s)"; - - static String getNumberOfElementsQuery = "select count(*) from %1$s"; - private static int minElements = 100; - private static int maxElements = 30000; - - float threshold = 0.1f; - String configPath = "./cfg/"; - SessionFactory connection; - List errors; - double mean; - double variance; - int numberoferrors; - int numberofvectors; - float maxerror; - String maxdiscrepancyPoint; - private HashMap output; - - @Override - 
public HashMap getInputParameters() { - - HashMap parameters = new HashMap(); - parameters.put("FirstTable", new VarCouple(VARTYPE.STRING, "hspec1")); - parameters.put("SecondTable", new VarCouple(VARTYPE.STRING, "hspec2")); - parameters.put("FirstTableCsquareColumn", new VarCouple(VARTYPE.STRING, "csquare")); - parameters.put("SecondTableCsquareColumn", new VarCouple(VARTYPE.STRING, "csquarecode")); - parameters.put("FirstTableProbabilityColumn", new VarCouple(VARTYPE.STRING, "csquare")); - parameters.put("SecondTableProbabilityColumn", new VarCouple(VARTYPE.STRING, "csquarecode")); - parameters.put("ComparisonThreshold", new VarCouple(VARTYPE.STRING, "0.1")); - parameters.put("MaxSamples", new VarCouple(VARTYPE.STRING, "10000")); - parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, "")); - parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, "")); - parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, "")); - parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, "")); - - return parameters; - } - - @Override - public List getOutputParameters() { - - List outputs = new ArrayList(); - - outputs.add("MEAN"); - outputs.add("VARIANCE"); - outputs.add("NUMBER_OF_ERRORS"); - outputs.add("NUMBER_OF_COMPARISONS"); - outputs.add("ACCURACY"); - outputs.add("MAXIMUM_ERROR"); - outputs.add("MAXIMUM_ERROR_POINT"); - - return outputs; - - } - - @Override - public void init(AlgorithmConfiguration config) throws Exception { - AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile); - // init db connection - connection = AlgorithmConfiguration.getConnectionFromConfig(config); - } - - @Override - public HashMap analyze(AlgorithmConfiguration config) throws Exception { - - String FirstTableCsquareColumn = config.getParam("FirstTableCsquareColumn"); - String SecondTableCsquareColumn = config.getParam("SecondTableCsquareColumn"); - String FirstTableProbabilityColumn = 
config.getParam("FirstTableProbabilityColumn"); - String SecondTableProbabilityColumn = config.getParam("SecondTableProbabilityColumn"); - String FirstTable = config.getParam("FirstTable"); - String SecondTable = config.getParam("SecondTable"); - String maxSamples = config.getParam("MaxSamples"); - int maxCompElements = maxElements; - if (maxSamples!=null && maxSamples.length()>0){ - int maxx = Integer.parseInt(maxSamples); - maxCompElements = maxx!=0?maxx:Integer.MAX_VALUE; - } - -// String query = String.format(discrepancyQuery, FirstTableCsquareColumn, SecondTableCsquareColumn, FirstTableProbabilityColumn, SecondTableProbabilityColumn, FirstTable, SecondTable); - List nelementsQ = DatabaseFactory.executeSQLQuery(DatabaseUtils.countElementsStatement(FirstTable),connection); - int nelements = Integer.parseInt(""+nelementsQ.get(0)); - - nelements = Math.min(Operations.calcNumOfRepresentativeElements(nelements, minElements),maxCompElements); - - AnalysisLogger.getLogger().trace("Number Of Elements to take: "+nelements); - String query = String.format(discrepancyQuery, FirstTableCsquareColumn, SecondTableCsquareColumn, FirstTableProbabilityColumn, SecondTableProbabilityColumn, FirstTable, SecondTable,""+nelements); - - List takeNPoints = DatabaseFactory.executeSQLQuery(String.format(getNumberOfElementsQuery, FirstTable), connection); - List takeMPoints = DatabaseFactory.executeSQLQuery(String.format(getNumberOfElementsQuery, SecondTable), connection); - int nPoints = Integer.parseInt(""+takeNPoints.get(0)); - int mPoints = Integer.parseInt(""+takeMPoints.get(0)); - numberofvectors = Math.max(nPoints, mPoints); - - AnalysisLogger.getLogger().trace("Discrepancy Calculation - Query to perform :" + query); - List takePoints = DatabaseFactory.executeSQLQuery(query, connection); - - super.processedRecords = 0; - if (takePoints != null) - super.processedRecords = takePoints.size(); - - threshold = Float.parseFloat(config.getParam("ComparisonThreshold")); - 
analyzeCompareList(takePoints); - calcDiscrepancy(); - - output = new HashMap(); - output.put("MEAN", "" + mean); - output.put("VARIANCE", "" + variance); - output.put("NUMBER_OF_ERRORS", "" + numberoferrors); - output.put("NUMBER_OF_COMPARISONS", "" + numberofvectors); - - float accuracy = 100; - if (processedRecords>0) - accuracy = (1 - (float) numberoferrors / (float) numberofvectors) * 100; - - - output.put("ACCURACY", "" + accuracy); - output.put("MAXIMUM_ERROR", "" + maxerror); - output.put("MAXIMUM_ERROR_POINT", "" + maxdiscrepancyPoint); - - return output; - - } - - public void end() { - try { - connection.close(); - } catch (Exception e) { - } - } - - void calcDiscrepancy() { - double[] err = new double[errors.size()]; - int i = 0; - for (Float e : errors) { - err[i] = e; - i++; - } - - mean = 0; - variance = 0; - - if (err.length > 0) { - mean = MathFunctions.mean(err); - variance = com.rapidminer.tools.math.MathFunctions.variance(err, Double.NEGATIVE_INFINITY); - } - } - - public void analyzeCompareList(List points) { - errors = new ArrayList(); - - if (points != null) { - maxerror = 0; - for (Object vector : points) { - Object[] elements = (Object[]) vector; - String csquare = (String) elements[0]; - float probabilityPoint1 = 0; - if (elements[2] != null) - probabilityPoint1 = (Float) elements[2]; - float probabilityPoint2 = 0; - if (elements[3] != null) - probabilityPoint2 = (Float) elements[3]; - float discrepancy = Math.abs(probabilityPoint2 - probabilityPoint1); - - if (discrepancy > threshold) { - errors.add(Math.abs(probabilityPoint2 - probabilityPoint1)); - numberoferrors++; - if (discrepancy > maxerror) { - maxerror = discrepancy; - maxdiscrepancyPoint = csquare; - } - } - } - } - - } - - @Override - public VARTYPE getContentType() { - return VARTYPE.MAP; - } - - @Override - public Object getContent() { - return output; - } -} diff --git a/src/org/gcube/dataanalysis/ecoengine/evaluation/DistributionQualityAnalysis.java 
b/src/org/gcube/dataanalysis/ecoengine/evaluation/DistributionQualityAnalysis.java deleted file mode 100644 index 34ce25d..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/evaluation/DistributionQualityAnalysis.java +++ /dev/null @@ -1,336 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.evaluation; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; - -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis; -import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; -import org.hibernate.SessionFactory; - -import com.rapidminer.example.Attribute; -import com.rapidminer.example.Attributes; -import com.rapidminer.example.ExampleSet; -import com.rapidminer.example.table.AttributeFactory; -import com.rapidminer.example.table.BinominalMapping; -import com.rapidminer.example.table.DoubleArrayDataRow; -import com.rapidminer.example.table.MemoryExampleTable; -import com.rapidminer.tools.Ontology; -import com.rapidminer.tools.math.ROCData; -import com.rapidminer.tools.math.ROCDataGenerator; - -public class DistributionQualityAnalysis extends DataAnalysis { - - static String getProbabilititesQuery = "select count(*) as distribprob from %1$s as a join %2$s as b on a.%3$s=b.%4$s and b.%5$s %6$s %7$s"; - static String getNumberOfElementsQuery = "select count(*) from %1$s"; - - static String getValuesQuery = "select %5$s as distribprob from %1$s as a join %2$s as b on a.%3$s=b.%4$s"; - - float threshold = 0.1f; - String configPath = "./cfg/"; - float acceptanceThreshold = 0.8f; - float rejectionThreshold = 0.3f; - double bestThreshold = 0.5d; - private HashMap output; - - public HashMap getInputParameters() { - - HashMap parameters = new 
HashMap(); - - parameters.put("PositiveCasesTable", new VarCouple(VARTYPE.STRING, "")); - parameters.put("NegativeCasesTable", new VarCouple(VARTYPE.STRING, "")); - - parameters.put("PositiveCasesTableKeyColumn", new VarCouple(VARTYPE.STRING, "csquarecode")); - parameters.put("NegativeCasesTableKeyColumn", new VarCouple(VARTYPE.STRING, "csquarecode")); - - parameters.put("DistributionTable", new VarCouple(VARTYPE.STRING, "csquare")); - parameters.put("DistributionTableKeyColumn", new VarCouple(VARTYPE.STRING, "csquarecode")); - parameters.put("DistributionTableProbabilityColumn", new VarCouple(VARTYPE.STRING, "csquarecode")); - - parameters.put("PositiveThreshold", new VarCouple(VARTYPE.STRING, "0.8")); - parameters.put("NegativeThreshold", new VarCouple(VARTYPE.STRING, "0.3")); - - parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, "")); - parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, "")); - parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, "")); - parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, "")); - - return parameters; - } - - public List getOutputParameters() { - - List outputs = new ArrayList(); - - outputs.add("TRUE_POSITIVES"); - outputs.add("TRUE_NEGATIVES"); - outputs.add("FALSE_POSITIVES"); - outputs.add("FALSE_NEGATIVES"); - outputs.add("AUC"); - outputs.add("ACCURACY"); - outputs.add("SENSITIVITY"); - outputs.add("OMISSIONRATE"); - outputs.add("SPECIFICITY"); - outputs.add("BESTTHRESHOLD"); - - return outputs; - } - - private int calculateNumberOfPoints(String table) { - - String numberOfPositiveCasesQuery = String.format(getNumberOfElementsQuery, table); - List totalPoints = DatabaseFactory.executeSQLQuery(numberOfPositiveCasesQuery, connection); - int points = Integer.parseInt("" + totalPoints.get(0)); - return points; - } - - private int calculateCaughtPoints(String casesTable, String distributionTable, String casesTableKeyColumn, String 
distributionTableKeyColumn, String distributionTableProbabilityColumn, String operator, String threshold) { - - String query = String.format(getProbabilititesQuery, casesTable, distributionTable, casesTableKeyColumn, distributionTableKeyColumn, distributionTableProbabilityColumn, operator, threshold); - AnalysisLogger.getLogger().trace("Compare - Query to perform for caught cases:" + query); - List caughtpoints = DatabaseFactory.executeSQLQuery(query, connection); - int points = Integer.parseInt("" + caughtpoints.get(0)); - return points; - } - - private double[] getPoints(String casesTable, String distributionTable, String casesTableKeyColumn, String distributionTableKeyColumn, String distributionTableProbabilityColumn, int numberOfExpectedPoints) { - - String query = String.format(getValuesQuery, casesTable, distributionTable, casesTableKeyColumn, distributionTableKeyColumn, distributionTableProbabilityColumn); - - AnalysisLogger.getLogger().trace("Compare - Query to perform for caught cases:" + query); - List caughtpoints = DatabaseFactory.executeSQLQuery(query, connection); - int size = 0; - if (caughtpoints != null) - size = caughtpoints.size(); - double[] points = new double[numberOfExpectedPoints]; - - for (int i = 0; i < size; i++) { - double element = 0; - if (caughtpoints.get(i) != null) - element = Double.parseDouble("" + caughtpoints.get(i)); - - points[i] = element; - } - - return points; - } - - public HashMap analyze(AlgorithmConfiguration config) throws Exception { - - try { - acceptanceThreshold = Float.parseFloat(config.getParam("PositiveThreshold")); - } catch (Exception e) { - AnalysisLogger.getLogger().debug("ERROR : " + e.getLocalizedMessage()); - } - try { - rejectionThreshold = Float.parseFloat(config.getParam("NegativeThreshold")); - } catch (Exception e) { - AnalysisLogger.getLogger().debug("ERROR : " + e.getLocalizedMessage()); - } - - String positiveCasesTable = config.getParam("PositiveCasesTable"); - String negativeCasesTable = 
config.getParam("NegativeCasesTable"); - String distributionTable = config.getParam("DistributionTable"); - String positiveCasesTableKeyColumn = config.getParam("PositiveCasesTableKeyColumn"); - String negativeCasesTableKeyColumn = config.getParam("NegativeCasesTableKeyColumn"); - String distributionTableKeyColumn = config.getParam("DistributionTableKeyColumn"); - String distributionTableProbabilityColumn = config.getParam("DistributionTableProbabilityColumn"); - String acceptanceThreshold = config.getParam("PositiveThreshold"); - String rejectionThreshold = config.getParam("NegativeThreshold"); - - int numberOfPositiveCases = calculateNumberOfPoints(config.getParam("PositiveCasesTable")); - - int truePositives = calculateCaughtPoints(positiveCasesTable, distributionTable, positiveCasesTableKeyColumn, distributionTableKeyColumn, distributionTableProbabilityColumn, ">", acceptanceThreshold); - - int falseNegatives = numberOfPositiveCases - truePositives; - - int numberOfNegativeCases = calculateNumberOfPoints(negativeCasesTable); - - super.processedRecords = numberOfPositiveCases + numberOfNegativeCases; - - int falsePositives = calculateCaughtPoints(negativeCasesTable, distributionTable, negativeCasesTableKeyColumn, distributionTableKeyColumn, distributionTableProbabilityColumn, ">", rejectionThreshold); - - int trueNegatives = numberOfNegativeCases - falsePositives; - - double[] positivePoints = getPoints(positiveCasesTable, distributionTable, positiveCasesTableKeyColumn, distributionTableKeyColumn, distributionTableProbabilityColumn, numberOfPositiveCases); - - double[] negativePoints = getPoints(negativeCasesTable, distributionTable, negativeCasesTableKeyColumn, distributionTableKeyColumn, distributionTableProbabilityColumn, numberOfNegativeCases); - - double auc = calculateAUC(positivePoints, negativePoints, false); - double accuracy = calculateAccuracy(truePositives, trueNegatives, falsePositives, falseNegatives); - double sensitivity = 
calculateSensitivity(truePositives, falseNegatives); - double omissionrate = calculateOmissionRate(truePositives, falseNegatives); - double specificity = calculateSpecificity(trueNegatives, falsePositives); - - output = new HashMap(); - output.put("TRUE_POSITIVES", "" + truePositives); - output.put("TRUE_NEGATIVES", "" + trueNegatives); - output.put("FALSE_POSITIVES", "" + falsePositives); - output.put("FALSE_NEGATIVES", "" + falseNegatives); - output.put("AUC", "" + auc); - output.put("ACCURACY", "" + accuracy); - output.put("SENSITIVITY", "" + sensitivity); - output.put("OMISSIONRATE", "" + omissionrate); - output.put("SPECIFICITY", "" + specificity); - output.put("BESTTHRESHOLD", "" + bestThreshold); - - return output; - - } - - public double calculateSensitivity(int TP, int FN) { - return (double) (TP) / (double) (TP + FN); - } - - public double calculateOmissionRate(int TP, int FN) { - return (double) (FN) / (double) (TP + FN); - } - - public double calculateSpecificity(int TN, int FP) { - return (double) (TN) / (double) (TN + FP); - } - - public double calculateAccuracy(int TP, int TN, int FP, int FN) { - return (double) (TP + TN) / (double) (TP + TN + FP + FN); - } - - public double calculateAUC(double[] scoresOnPresence, double[] scoresOnAbsence, boolean produceChart) { - - List attributes = new LinkedList(); - Attribute labelAtt = AttributeFactory.createAttribute("LABEL", Ontology.BINOMINAL); - BinominalMapping bm = (BinominalMapping) labelAtt.getMapping(); - bm.setMapping("1", 1); - bm.setMapping("0", 0); - - Attribute confidenceAtt1 = AttributeFactory.createAttribute(Attributes.CONFIDENCE_NAME + "_1", Ontology.REAL); - attributes.add(confidenceAtt1); - attributes.add(labelAtt); - - MemoryExampleTable table = new MemoryExampleTable(attributes); - int numOfPoints = scoresOnPresence.length + scoresOnAbsence.length; - int numOfPresence = scoresOnPresence.length; - int numOfAttributes = attributes.size(); - double pos = labelAtt.getMapping().mapString("1"); - 
double neg = labelAtt.getMapping().mapString("0"); - - for (int i = 0; i < numOfPresence; i++) { - double[] data = new double[numOfAttributes]; - data[0] = scoresOnPresence[i]; - data[1] = pos; - table.addDataRow(new DoubleArrayDataRow(data)); - } - - for (int i = numOfPresence; i < numOfPoints; i++) { - double[] data = new double[numOfAttributes]; - data[0] = scoresOnAbsence[i - numOfPresence]; - data[1] = neg; - table.addDataRow(new DoubleArrayDataRow(data)); - } - - ROCDataGenerator roc = new ROCDataGenerator(acceptanceThreshold, rejectionThreshold); - ExampleSet exampleSet = table.createExampleSet(labelAtt); - exampleSet.getAttributes().setSpecialAttribute(confidenceAtt1, Attributes.CONFIDENCE_NAME + "_1"); - - ROCData dataROC = roc.createROCData(exampleSet, false); - double auc = roc.calculateAUC(dataROC); - - // PLOTS THE ROC!!! - if (produceChart) - roc.createROCPlotDialog(dataROC); - - bestThreshold = roc.getBestThreshold(); - return auc; - } - - public static void visualizeResults(HashMap results) { - - for (String key : results.keySet()) { - System.out.println(key + ":" + results.get(key)); - } - } - - SessionFactory connection; - - public void init(AlgorithmConfiguration config) throws Exception { - AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile); - // init db connection - connection = AlgorithmConfiguration.getConnectionFromConfig(config); - } - - public void end() { - try { - connection.close(); - } catch (Exception e) { - } - } - - public static void main(String[] args) { - - AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile); - /* - * double [] pos = new double [4]; // pos[0] = 1d; pos[1] = 0.8d;pos[2]=0.7;pos[3]=0.9; // pos[0] = 1d; pos[1] = 1d;pos[2]=1;pos[3]=1; // pos[0] = 0.3d; pos[1] = 0.7d;pos[2]=0.1;pos[3]=0.9; - * - * - * double [] neg = new double [4]; // neg[0] = 0d; neg[1] = 0.3d;neg[2]=0.4;neg[3]=0.6; // neg[0] = 0d; neg[1] = 0.0d;neg[2]=0.0;neg[3]=0.0; // neg[0] = 
0.7d; neg[1] = 0.3d;neg[2]=0.9;neg[3]=0.1; - * - * DistributionQualityAnalysis quality = new DistributionQualityAnalysis(); double auc = quality.calculateAUC(pos, neg); System.out.println("AUC: "+auc); - * - * int n = 100; double[] posRandom = new double[n]; double[] negRandom = new double[n]; - * - * for (int i=0;i output; - private static int minimumNumberToTake = 10000; - private float status; - private int currentIterationStep; - private float innerstatus; - private int maxTests = 2; - - public HashMap getInputParameters() { - - HashMap parameters = new HashMap(); - - parameters.put("ProjectingAreaTable", new VarCouple(VARTYPE.STRING, "")); - parameters.put("ProjectingAreaFeaturesOptionalCondition", new VarCouple(VARTYPE.STRING, "oceanarea>0")); - - parameters.put("PositiveCasesTable", new VarCouple(VARTYPE.STRING, "")); - parameters.put("NegativeCasesTable", new VarCouple(VARTYPE.STRING, "")); - - parameters.put("FeaturesColumns", new VarCouple(VARTYPE.STRING, "")); - parameters.put("PositiveFeaturesColumns", new VarCouple(VARTYPE.STRING, "")); - - parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, "")); - parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, "")); - parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, "")); - parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, "")); - - return parameters; - } - - public List getOutputParameters() { - - List outputs = new ArrayList(); - - outputs.add("HRS_VECTOR"); - outputs.add("HRS"); - - return outputs; - } - - private int calculateNumberOfPoints(String table, String option) { - - String numberOfPositiveCasesQuery = String.format(getNumberOfElementsQuery, table); - numberOfPositiveCasesQuery = numberOfPositiveCasesQuery.replace("#OPTIONAL#", (option != null) ? 
option : ""); - List totalPoints = DatabaseFactory.executeSQLQuery(numberOfPositiveCasesQuery, connection); - int points = Integer.parseInt("" + totalPoints.get(0)); - return points; - } - - private double[][] getPoints(String table, String option, String features, int numberOfElemsToTake) { - - String query = String.format(getRandomVectors, features, table, "" + numberOfElemsToTake); - query = query.replace("#OPTIONAL#", (option != null) ? option : ""); - - AnalysisLogger.getLogger().trace("Compare - Query to perform for points:" + query); - List caughtpoints = DatabaseFactory.executeSQLQuery(query, connection); - int size = 0; - if (caughtpoints != null) - size = caughtpoints.size(); - double[][] points = null; - if (size > 0) { - - points = new double[size][((Object[]) caughtpoints.get(0)).length]; - - for (int i = 0; i < size; i++) { - - if (caughtpoints.get(i) != null) { - Object[] arrayFeatures = (Object[]) caughtpoints.get(i); - for (int j = 0; j < arrayFeatures.length; j++) { - double delement = arrayFeatures[j] == null ? 
0d : Double.parseDouble("" + arrayFeatures[j]); - points[i][j] = delement; - } - } - - } - } - return points; - } - - - - - private void calcHRS(String projectingAreaTable, String projectingAreaFeaturesOptionalCondition, String FeaturesColumns, String positiveCasesTable, String negativeCasesTable,int numberOfElements) throws Exception{ - innerstatus = 0f; - int numberOfElementsToTake = Operations.calcNumOfRepresentativeElements(numberOfElements, minimumNumberToTake); - AnalysisLogger.getLogger().trace("HRS: TAKING "+numberOfElementsToTake+" POINTS ON "+numberOfElements+" FROM THE AREA UNDER ANALYSIS"); - // 1 - take the right number of points - double[][] areaPoints = getPoints(projectingAreaTable, projectingAreaFeaturesOptionalCondition, FeaturesColumns, numberOfElementsToTake); - AnalysisLogger.getLogger().trace("HRS: AREA POINTS MATRIX GENERATED"); - innerstatus = 10f; - Operations operations = new Operations(); - // 2 - standardize the matrix - areaPoints = operations.standardize(areaPoints); - AnalysisLogger.getLogger().trace("HRS: MATRIX HAS BEEN STANDARDIZED"); - innerstatus = 20f; - // 3 - calculate PCA - PrincipalComponentAnalysis pca = new PrincipalComponentAnalysis(); - pca.calcPCA(areaPoints); - AnalysisLogger.getLogger().trace("HRS: PCA HAS BEEN TRAINED"); - innerstatus = 30f; - // 4 - get the pca components for all the vector - double[][] pcaComponents = pca.getComponentsMatrix(areaPoints); - AnalysisLogger.getLogger().trace("HRS: PCA COMPONENT CALCULATED"); - innerstatus = 40f; - // 5 - calculate the frequency distributions for all the pca: each row will be a frequency distribution for a pca component associated to uniform divisions of the range - calcFrequenciesDistributionsForComponents(pcaComponents); - AnalysisLogger.getLogger().trace("HRS: FREQUENCIES FOR COMPONENTS CALCULATED"); - innerstatus = 50f; - // 6 - take positive points and negative points - eventually merge them - double[][] positivePoints = null; - if ((positiveCasesTable!=null) && 
(positiveCasesTable.length()>0)) - positivePoints = getPoints(positiveCasesTable, "", FeaturesColumns, numberOfElementsToTake); - double[][] negativePoints = null; - if ((negativeCasesTable!=null) && (negativeCasesTable.length()>0)) - negativePoints = getPoints(negativeCasesTable, "", FeaturesColumns, numberOfElementsToTake); - double[][] habitatPoints = Transformations.mergeMatrixes(positivePoints, negativePoints); - AnalysisLogger.getLogger().trace("HRS: HABITAT POINTS BUILT FROM POSITIVE AND NEGATIVE POINTS"); - innerstatus = 60f; - // 7 - Standardize the points respect to previous means and variances - habitatPoints = operations.standardize(habitatPoints, operations.means, operations.variances); - AnalysisLogger.getLogger().trace("HRS: HABITAT POINTS HAVE BEEN STANDARDIZED RESPECT TO PREVIOUS MEANS AND VARIANCES"); - // 8 - calculate the pca components for habitat - double[][] habitatPcaComponents = pca.getComponentsMatrix(habitatPoints); - AnalysisLogger.getLogger().trace("HRS: HABITAT POINTS HAVE BEEN TRANSFORMED BY PCA"); - innerstatus = 70f; - // 9 - calculate frequencies distributions for each component, respect to previous intervals - int components = habitatPcaComponents[0].length; - // 10 - calculate absolute differences and sum -> obtain a hrs for each PCA component = for each feature - currentHRSVector = new double[components]; - - double[][] habitatPcaPointsMatrix = Transformations.traspose(habitatPcaComponents); - for (int i = 0; i < components; i++) { - double[] habitatPcaPoints = habitatPcaPointsMatrix[i]; - // calculate frequency distributions respect to previous intervals - double[] habitatPcafrequencies = Operations.calcFrequencies(intervals.get(i), habitatPcaPoints); - habitatPcafrequencies = Operations.normalizeFrequencies(habitatPcafrequencies, habitatPcaPoints.length); - double[] absdifference = Operations.vectorialAbsoluteDifference(habitatPcafrequencies, frequencyDistrib.get(i)); - currentHRSVector[i] = 
Operations.sumVector(absdifference); - } - - AnalysisLogger.getLogger().trace("HRS: HRS VECTOR HAS BEEN CALCULATED"); - innerstatus = 90f; - // 11 - obtain hrsScore by weighted sum of hrs respect to inverse eigenvalues - too variable, substituted with the sum of the scores -// currentHRSScore = Operations.scalarProduct(currentHRSVector, pca.getInverseNormalizedEigenvalues()); - currentHRSScore = Operations.sumVector(currentHRSVector); - - AnalysisLogger.getLogger().trace("HRS: HRS SCORE HAS BEEN CALCULATED"); - innerstatus = 100f; - } - private double meanHRS ; - private double [] meanHRSVector; - private double currentHRSScore; - private double [] currentHRSVector; - - public HashMap analyze(AlgorithmConfiguration config) throws Exception { - - try { - status = 0; - String projectingAreaTable = config.getParam("ProjectingAreaTable"); - String projectingAreaFeaturesOptionalCondition = config.getParam("ProjectingAreaFeaturesOptionalCondition"); - String FeaturesColumns = config.getParam("FeaturesColumns"); - String positiveCasesTable = config.getParam("PositiveCasesTable"); - String negativeCasesTable = config.getParam("NegativeCasesTable"); - connection = AlgorithmConfiguration.getConnectionFromConfig(config); - meanHRS = 0; - int numberOfElements = calculateNumberOfPoints(projectingAreaTable, projectingAreaFeaturesOptionalCondition); - - for (int i=0;i(); - output.put("HRS_VECTOR", "" + Transformations.vector2String(meanHRSVector)); - output.put("HRS", "" + meanHRS); - - return output; - } catch (Exception e) { - e.printStackTrace(); - AnalysisLogger.getLogger().error("ALERT: AN ERROR OCCURRED DURING HRS CALCULATION : " + e.getLocalizedMessage()); - throw e; - } finally { - connection.close(); - status = 100; - AnalysisLogger.getLogger().trace("COMPUTATION FINISHED "); - } - } - - List frequencyDistrib; - List intervals; // uniform subdivision of the numeric ranges - - // calculate a frequency distribution for each component - public void 
calcFrequenciesDistributionsForComponents(double[][] pcaComponents) { - frequencyDistrib = null; - if (pcaComponents.length > 0) { - int sizeDistrib = pcaComponents[0].length; - frequencyDistrib = new ArrayList(); - intervals = new ArrayList(); - double[][] pcaColumns = Transformations.traspose(pcaComponents); - for (int i = 0; i < sizeDistrib; i++) { - double[] pcaPoints = pcaColumns[i]; - double[] interval = Operations.uniformDivide(Operations.getMax(pcaPoints), Operations.getMin(pcaPoints), pcaPoints); - double[] frequencies = Operations.calcFrequencies(interval, pcaPoints); - frequencies = Operations.normalizeFrequencies(frequencies, pcaPoints.length); - intervals.add(interval); - frequencyDistrib.add(frequencies); - } - } - } - - public static void visualizeResults(HashMap results) { - - for (String key : results.keySet()) { - System.out.println(key + ":" + results.get(key)); - } - } - - SessionFactory connection; - - public void init(AlgorithmConfiguration config) throws Exception { - AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile); - // init db connection - // connection = AlgorithmConfiguration.getConnectionFromConfig(config); - config.initRapidMiner(); - } - - public void end() { - try { - connection.close(); - } catch (Exception e) { - } - } - - public static void main(String[] args) throws Exception { - - AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile); - AlgorithmConfiguration config = new AlgorithmConfiguration(); - - config.setConfigPath("./cfg/"); - config.setParam("ProjectingAreaTable", "hcaf_d"); -// config.setParam("ProjectingAreaTable", "absence_data_baskingshark_random"); -// config.setParam("ProjectingAreaTable", "absence_data_baskingshark2"); - config.setParam("ProjectingAreaFeaturesOptionalCondition", "where oceanarea>0"); - config.setParam("FeaturesColumns", "depthmean,depthmax,depthmin, sstanmean,sbtanmean,salinitymean,salinitybmean, 
primprodmean,iceconann,landdist,oceanarea"); - config.setParam("PositiveCasesTable", "presence_data_baskingshark"); - config.setParam("NegativeCasesTable", "absence_data_baskingshark_random"); -// config.setParam("NegativeCasesTable", "absence_data_baskingshark2"); - - HabitatRepresentativeness hsrcalc = new HabitatRepresentativeness(); - hsrcalc.init(config); - HashMap output = hsrcalc.analyze(config); - for (String param:output.keySet()){ - System.out.println(param+":"+output.get(param)); - } - /* - double[][] matrix = new double[7][2]; - double[] row1 = { 2d, 3d }; - double[] row2 = { 3d, 4d }; - double[] row3 = { 4d, 5d }; - double[] row4 = { 5d, 6d }; - double[] row5 = { 2d, 3d }; - double[] row6 = { 2d, 5d }; - double[] row7 = { 3d, 4d }; - - matrix[0] = row1; - matrix[1] = row2; - matrix[2] = row3; - matrix[3] = row4; - matrix[4] = row5; - matrix[5] = row6; - matrix[6] = row7; -*/ - // Operations operations = new Operations(); - // matrix = operations.standardize(matrix); - // hsrcalc.calcFrequenciesDistributionsForComponents(matrix); - -// double[][] bigmat = Transformations.mergeMatrixes(null, matrix); - - System.out.println("FINISHED"); - } - - @Override - public VARTYPE getContentType() { - return VARTYPE.MAP; - } - - @Override - public Object getContent() { - return output; - } - - @Override - public float getStatus() { - return status==100f?status: Math.min((status+(float)(currentIterationStep+1)*innerstatus/(float)maxTests),99f); - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateAnalysis.java b/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateAnalysis.java deleted file mode 100644 index 093732d..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateAnalysis.java +++ /dev/null @@ -1,656 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.evaluation.bioclimate; - -import java.awt.Image; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; - 
-import org.gcube.contentmanagement.graphtools.utils.MathFunctions; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.interfaces.Evaluator; -import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.Hspen; -import org.gcube.dataanalysis.ecoengine.processing.factories.EvaluatorsFactory; -import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; -import org.gcube.dataanalysis.ecoengine.utils.Operations; -import org.gcube.dataanalysis.ecoengine.utils.Tuple; -import org.hibernate.SessionFactory; -import org.jfree.data.category.DefaultCategoryDataset; - -import com.mchange.v1.util.ArrayUtils; - -public class BioClimateAnalysis { - - private static String countHighProbabilityCells = "select count(*) from %1$s where probability>%2$s"; - private static String countSeaCells = "select count(*) from %1$s as a join %2$s as b on a.oceanarea>0 and a.csquarecode=b.csquarecode and ((a.iceconannb.iceconann+%3$s) or " + "(a.salinitymeanb.salinitymean+%3$s) or (a.sstanmeanb.sstanmean+%3$s))"; - private static String iceLeakage = "select count(*) from %1$s as a join %2$s as b on a.csquarecode=b.csquarecode and (a.iceconann0"; - // private static String takeRangeOfDepths = "select distinct depthmin, max(depthmax) from %1$s group by depthmin order by depthmin"; - private static String takeRangeOfDepths = "select distinct depthmin, depthmax from %1$s order by depthmin"; - private static String takeRangeOfParam = "select %1$s from %2$s where %1$s is not null %3$s order by %1$s"; - private static String countNumberOfSpeciesPerRange = "select count(*) from %1$s where %4$s>%2$s and %4$s<=%3$s "; - private static String countNumberOfSpecies = "select count(*) from %1$s where depthmin<%2$s 
and depthmin>=%3$s and depthmax<%4$s and depthmax>=%5$s"; - private static String countProbabilityPerArea = "select count(*) from %1$s as a join hcaf_s as b on b.%2$s = %3$s and a.probability > #THRESHOLD# and a.csquarecode=b.csquarecode"; - - public static String salinityDefaultRange = "salinitymin>27.44 and salinitymin<=36.57"; - public static String salinityMinFeature = "salinitymin"; - - private static enum FIELD { - iceconann, sstanmean, salinitymean - }; - - private static String takeAvgSelection = "select avg(%1$s),%2$s from %3$s %4$s group by %2$s order by %2$s"; - private static String[] selectionCriteria = { "faoaream", "lme" }; - private static String[] timeseriesNames = { "Ice Conc.", "Sea Surface Temperature", "Salinity" }; - private static String[] quantitiesNames = { FIELD.iceconann.name(), FIELD.sstanmean.name(), FIELD.salinitymean.name() }; - private static String[] criteriaFilters = { "where faoaream>0", "where lme>0" }; - private static String[] criteriaNames = { "FaoArea", "LME" }; - - // private static String takeSubHspec = "select %1$s from #CLAUSE# order by %1$s"; - - private static String meanVal = "select avg(%2$s) from %1$s where oceanarea>0"; - // private static String meanVal = "select %2$s from %1$s where csquarecode = '1311:478:4'"; - - private String configPath; - private String temporaryDirectory; - - protected SessionFactory referencedbConnection; - - private String[] csquareTable; - private String[] finalDistributionTable; - - // hspec - private int[] highProbabilityCells; - private double[] discrepancies; - // hcaf - private double[] avgIce; - private double[] avgSST; - private double[] avgSalinity; - - private Evaluator eval; - - private float status; - private boolean liveRender; - boolean doHcafAn; - boolean doHspecAn; - - private LexicalEngineConfiguration config; - - static int width = 680; - static int height = 420; - static int defaultNumOfFeatureClusters = 10; - - public static void main(String[] args) throws Exception { - 
String configPath = "./cfg/"; - String databaseUrl = "jdbc:postgresql://localhost/testdb"; - String databaseUser = "gcube"; - String databasePassword = "d4science2"; - BioClimateAnalysis bioClimate = new BioClimateAnalysis(configPath, configPath, databaseUrl, databaseUser, databasePassword, true); - bioClimate.produceGraphs2D(); - } - - private static String[] SERIES = { "High Probability Cells Trend (>%1$s)", "Number of Changing Cells", "Reducing Ice Concentration Trend", "Average Discrepancy Between Distributions", "Average Trends", "Ice Concentration", "Sea Surface Temperature", "Salinity" }; - - public void produceGraphs2D() throws Exception { - DefaultCategoryDataset testpoints = new DefaultCategoryDataset(); - // double[] points = Operations.parabolicInterpolation(-200d, 200d, 100); - double[] points = Operations.parabolicInterpolation(0.1926, 0.1727, 20); - // double[] points = Operations.inverseParabolicInterpolation(-200d, 300d, 50); - // double[] points = Operations.inverseExponentialInterpolation(29d, 30d, 10); - for (int i = 0; i < points.length; i++) { - // System.out.println(points[i]); - testpoints.addValue(points[i], "Points", "" + i); - } - - BioClimateGraph lineg9 = new BioClimateGraph("parabols", Operations.getMax(points), Operations.getMin(points)); - lineg9.render(testpoints); - } - - private HashMap producedImages; - - public HashMap getProducedImages() { - return producedImages; - } - - private void produceGraphs(String[] csquareTableNames, String[] hspecTableNames, float threshold) throws Exception { - producedImages = new HashMap (); - int numberOfTrends = highProbabilityCells.length; - - // create the datasets... 
- DefaultCategoryDataset probabilityTrend = new DefaultCategoryDataset(); - DefaultCategoryDataset discrepanciesTrend = new DefaultCategoryDataset(); - - DefaultCategoryDataset avgIceD = new DefaultCategoryDataset(); - DefaultCategoryDataset avgSSTD = new DefaultCategoryDataset(); - DefaultCategoryDataset avgSalinityD = new DefaultCategoryDataset(); - - for (int i = 0; i < numberOfTrends; i++) { - if (doHcafAn) { - avgIceD.addValue(avgIce[i], "Ice Conc.", csquareTableNames[i]); - avgSSTD.addValue(avgSST[i], "SST", csquareTableNames[i]); - avgSalinityD.addValue(avgSalinity[i], "Salinity", csquareTableNames[i]); - } - if (doHspecAn) { - probabilityTrend.addValue(highProbabilityCells[i], "Number Of Cells", hspecTableNames[i]); - if (i > 0) { - discrepanciesTrend.addValue(discrepancies[i], "Mean Discrepancy Respect to Prev. Distrib.", finalDistributionTable[i]); - } - } - } - - if (doHspecAn) { - - double min = Operations.getMin(discrepancies); - discrepancies[0] = min; - - if (liveRender) { - BioClimateGraph lineg1 = new BioClimateGraph(String.format(SERIES[0],threshold), Operations.getMax(highProbabilityCells), Operations.getMin(highProbabilityCells)); - BioClimateGraph lineg4 = new BioClimateGraph(SERIES[3], Operations.getMax(discrepancies), min); - lineg4.render(discrepanciesTrend); - lineg1.render(probabilityTrend); - } - - producedImages.put("Probability_Trend",BioClimateGraph.renderStaticImgObject(width, height, probabilityTrend, String.format(SERIES[0],threshold), Operations.getMax(highProbabilityCells), Operations.getMin(highProbabilityCells))); - producedImages.put("Probability_Discrepancies_Trend",BioClimateGraph.renderStaticImgObject(width, height, discrepanciesTrend, SERIES[3], Operations.getMax(discrepancies), min)); - - } - if (doHcafAn) { - - if (liveRender) { - BioClimateGraph lineg6 = new BioClimateGraph(SERIES[5], Operations.getMax(avgIce), Operations.getMin(avgIce)); - BioClimateGraph lineg7 = new BioClimateGraph(SERIES[6], 
Operations.getMax(avgSST), Operations.getMin(avgSST)); - BioClimateGraph lineg8 = new BioClimateGraph(SERIES[7], Operations.getMax(avgSalinity), Operations.getMin(avgSalinity)); - lineg6.render(avgIceD); - lineg7.render(avgSSTD); - lineg8.render(avgSalinityD); - } - - producedImages.put("Average_Ice_Concentration",BioClimateGraph.renderStaticImgObject(width, height, avgIceD, SERIES[5], Operations.getMax(avgIce), Operations.getMin(avgIce))); - producedImages.put("Average_SST",BioClimateGraph.renderStaticImgObject(width, height, avgSSTD, SERIES[6], Operations.getMax(avgSST), Operations.getMin(avgSST))); - producedImages.put("Average_Salinity",BioClimateGraph.renderStaticImgObject(width, height, avgSalinityD, SERIES[7], Operations.getMax(avgSalinity), Operations.getMin(avgSalinity))); - - } - - AnalysisLogger.getLogger().trace("Produced All Images"); - - } - - public void hcafEvolutionAnalysis(String[] hcafTable, String[] hcafTableNames) throws Exception { - globalEvolutionAnalysis(hcafTable, null, hcafTableNames, null, null, null,0f); - } - - public void hspecEvolutionAnalysis(String[] hspecTables, String[] hspecTableNames, String probabilityColumn, String csquareColumn,float threshold) throws Exception { - globalEvolutionAnalysis(null, hspecTables, null, hspecTableNames, probabilityColumn, csquareColumn, threshold); - } - - private String[] checkTableNames(String [] tablesNames){ - ArrayList newtables = new ArrayList(); - for (String table:tablesNames){ - int i=1; - String originalTable = table; - while (newtables.contains(table)){ - table = originalTable+"_"+i; - i++; - } - newtables.add(table); - } - String [] tables = new String[tablesNames.length]; - for (int j=0;j> GeoMap, String[] tablesNames) { - // produce a char for each feature - tablesNames = checkTableNames(tablesNames); - producedImages = new HashMap (); - for (String featurename : GeoMap.keySet()) { - DefaultCategoryDataset chart = new DefaultCategoryDataset(); - HashMap timeseries = 
GeoMap.get(featurename); - double absmax = -Double.MAX_VALUE; - double absmin = Double.MAX_VALUE; - for (String timeserie : timeseries.keySet()) { - double[] points = timeseries.get(timeserie); - for (int i = 0; i < points.length; i++) { - if (points[i] > absmax) - absmax = points[i]; - if (points[i] < absmin) - absmin = points[i]; - chart.addValue(points[i], timeserie, tablesNames[i]); - } - } - if (liveRender) { - BioClimateGraph lineg1 = new BioClimateGraph(featurename, absmax, absmin); - lineg1.render(chart); - } - producedImages.put(featurename.replace(" ", "_"),BioClimateGraph.renderStaticImgObject(width, height, chart, featurename, absmax, absmin)); - } - } - - /** - * Generates a chart for hspens in time according to a certain interval in the parameter - * e.g. : a chart for several salinity intervals - * @param hspenTables - * @param hspenTableNames - * @param parameterName - * @param condition - * @throws Exception - */ - public void speciesEvolutionAnalysis(String[] hspenTables, String[] hspenTableNames, String parameterName, String condition) throws Exception { - try { - referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config); - AnalysisLogger.getLogger().debug("ReferenceDB initialized"); - status = 0f; - - int numbOfTables = (hspenTables != null) ? 
hspenTables.length : 0; - - if (numbOfTables > 0) { - - // a map for each range of features: depth[0,100] ,depth [100,200] - HashMap> GeoMap = new HashMap>(); - - float statusstep = 80f / (float) numbOfTables; - if (condition != null && (condition.length() > 0)) - condition = "and " + condition; - // take the spectrum of depths - AnalysisLogger.getLogger().trace("Range query: " + String.format(takeRangeOfParam, parameterName, hspenTables[0], condition)); - - List paramrange = DatabaseFactory.executeSQLQuery(String.format(takeRangeOfParam, parameterName, hspenTables[0], condition), referencedbConnection); - - int numberOfParams = paramrange.size(); - double absolutePMin = Double.parseDouble("" + ((Object) paramrange.get(0))); - double absolutePMax = Double.parseDouble("" + ((Object) paramrange.get(numberOfParams - 1))); - double step = (absolutePMax - absolutePMin) / (double) defaultNumOfFeatureClusters; - int pClusters[] = new int[defaultNumOfFeatureClusters + 1]; - pClusters[0] = 0; - for (int i = 1; i < pClusters.length; i++) { - double pToFind = absolutePMin + step * (i + 1); - int k = 0; - for (Object row : paramrange) { - if (Double.parseDouble("" + ((Object) row)) > pToFind) - break; - k++; - } - if (k >= numberOfParams) - k = numberOfParams - 1; - - pClusters[i] = k; - } - - // for each table - for (int i = 0; i < numbOfTables; i++) { - double pmax = 0; - // for each cluster build up a chart - for (int j = 1; j < pClusters.length; j++) { - - double prevpmax = MathFunctions.roundDecimal(Double.parseDouble("" + (Object) paramrange.get(pClusters[j - 1])),2); - pmax = MathFunctions.roundDecimal(Double.parseDouble("" + (Object) paramrange.get(pClusters[j])),2); - - if (prevpmax != pmax) { - // take the number of elements for this range - String countSpeciesQuery = String.format(countNumberOfSpeciesPerRange, hspenTables[i], prevpmax, pmax, parameterName); - AnalysisLogger.getLogger().trace("count elements query: " + countSpeciesQuery); - - List elementsInRange = 
DatabaseFactory.executeSQLQuery(countSpeciesQuery, referencedbConnection); - int nelements = (elementsInRange == null) ? 0 : Integer.parseInt("" + elementsInRange.get(0)); - - AnalysisLogger.getLogger().trace("Number of elements for " + hspenTables[i] + " in (" + prevpmax + " - " + pmax + ")" + " : " + nelements); - - // take the chart for this range - String chartName = parameterName + " envelope for interval (" + prevpmax + " ; " + pmax + ")"; - // build the chart - HashMap submap = GeoMap.get(chartName); - if (submap == null) { - submap = new HashMap(); - GeoMap.put(chartName, submap); - } - - String timeseries = "number of species"; - double[] elements = submap.get(timeseries); - if (elements == null) { - elements = new double[numbOfTables]; - submap.put(timeseries, elements); - } - elements[i] = nelements; - } - } - - status = status + statusstep; - } - - status = 80f; - produceCharts(GeoMap, hspenTableNames); - } - } catch (Exception e) { - e.printStackTrace(); - throw e; - } finally { - status = 100f; - referencedbConnection.close(); - } - } - - /** - * Generates a chart for hspec probability > thr in each Fao Area and LME - * @param hspecTables - * @param hspecTablesNames - * @throws Exception - */ - public void speciesGeographicEvolutionAnalysis(String[] hspecTables, String[] hspecTablesNames, float threshold) throws Exception { - try { - referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config); - AnalysisLogger.getLogger().debug("ReferenceDB initialized"); - status = 0f; - - int numbOfTables = (hspecTables != null) ? hspecTables.length : 0; - - if (numbOfTables > 0) { - - // a map for each feature. each sub map contains a trend for faoaream, lme etc. - HashMap> GeoMap = new HashMap>(); - - float statusstep = 80f / (float) numbOfTables; - // for each table - for (int i = 0; i < numbOfTables; i++) { - // for each criterion to apply: fao area, lme etc. 
- for (int j = 0; j < criteriaNames.length; j++) { - List listCriterion = DatabaseFactory.executeSQLQuery(DatabaseUtils.getDinstictElements("hcaf_s", selectionCriteria[j],criteriaFilters[j]),referencedbConnection); - for (Object code: listCriterion){ - String code$ = ""+code; - String query = String.format(countProbabilityPerArea,hspecTables[i],selectionCriteria[j],code$); - query = query.replace("#THRESHOLD#", ""+threshold); - AnalysisLogger.getLogger().trace("Executing query for counting probabilities: "+query); - List counts = DatabaseFactory.executeSQLQuery(query, referencedbConnection); - AnalysisLogger.getLogger().trace("Query Executed"); - int countPerArea = (counts==null)?0:Integer.parseInt(""+counts.get(0)); - - String chartName = "Hspec (prob>0.8) for " + criteriaNames[j] + "_" + code$; - // put the code and the value in the timeseries associated to the feature name - HashMap submap = GeoMap.get(chartName); - if (submap == null) { - submap = new HashMap(); - GeoMap.put(chartName, submap); - } - - String timeseries = "number of occupied cells"; - double[] elements = submap.get(timeseries); - if (elements == null) { - elements = new double[numbOfTables]; - submap.put(timeseries, elements); - } - - elements[i] = countPerArea; - - } - - } - status = status + statusstep; - } - - status = 80f; - produceCharts(GeoMap, hspecTablesNames); - } - } catch (Exception e) { - e.printStackTrace(); - throw e; - } finally { - status = 100f; - referencedbConnection.close(); - } - } - -/** - * Generates a geographic trend for each hspec feature: ice con, salinity, sst in each fao area - * @param hcafTable - * @param hcafTableNames - * @throws Exception - */ - public void geographicEvolutionAnalysis(String[] hcafTable, String[] hcafTableNames) throws Exception { - try { - referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config); - AnalysisLogger.getLogger().debug("ReferenceDB initialized"); - doHcafAn = true; 
- doHspecAn = true; - status = 0f; - this.csquareTable = hcafTable; - - int numbOfTables = (hcafTable != null) ? hcafTable.length : 0; - - if (numbOfTables > 0) { - - // a map for each feature. each sub map contains a trend for faoaream, lme etc. - HashMap> GeoMap = new HashMap>(); - - float statusstep = 80f / (float) numbOfTables; - // for each table - for (int i = 0; i < numbOfTables; i++) { - // for each criterion to apply: fao area, lme etc. - for (int j = 0; j < criteriaNames.length; j++) { - // for each quantity to display: ice concentration - for (int k = 0; k < quantitiesNames.length; k++) { - String query = String.format(takeAvgSelection, quantitiesNames[k], selectionCriteria[j], hcafTable[i], criteriaFilters[j]); - AnalysisLogger.getLogger().debug("Query to be executed : " + query); - // take couples (avg,code) - List quantityCriterion = DatabaseFactory.executeSQLQuery(query, referencedbConnection); - // for each row - for (Object element : quantityCriterion) { - Object[] row = (Object[]) element; - // take avg value - double value = (row[0] == null) ? 
0 : Double.parseDouble("" + row[0]); - // take code for criterion - String code = "" + row[1]; - - String chartName = timeseriesNames[k] + " for " + criteriaNames[j] + "_" + code; - // put the code and the value in the timeseries associated to the feature name - HashMap submap = GeoMap.get(chartName); - if (submap == null) { - submap = new HashMap(); - GeoMap.put(chartName, submap); - } - - String timeseries = criteriaNames[j] + "_" + code; - double[] elements = submap.get(timeseries); - if (elements == null) { - elements = new double[numbOfTables]; - submap.put(timeseries, elements); - } - elements[i] = value; - } - } - } - status = status + statusstep; - } - - status = 80f; - produceCharts(GeoMap, hcafTableNames); - } - } catch (Exception e) { - e.printStackTrace(); - throw e; - } finally { - status = 100f; - referencedbConnection.close(); - } - } - - /** - * Generates a chart for each hspec feature - * Generates a chart for hspec prob > thr and performs a discrepancy analysis on hspec - * @param hcafTable - * @param hspecTables - * @param hcafTablesNames - * @param hspecTableNames - * @param probabilityColumn - * @param csquareColumn - * @throws Exception - */ - public void globalEvolutionAnalysis(String[] hcafTable, String[] hspecTables, String[] hcafTablesNames, String[] hspecTableNames, String probabilityColumn, String csquareColumn,float threshold) throws Exception { - try { - referencedbConnection = DatabaseFactory.initDBConnection(configPath + AlgorithmConfiguration.defaultConnectionFile, config); - AnalysisLogger.getLogger().debug("ReferenceDB initialized"); - doHcafAn = true; - doHspecAn = true; - - if (hcafTable == null) - doHcafAn = false; - if (hspecTables == null) - doHspecAn = false; - - status = 0f; - this.csquareTable = hcafTable; - this.finalDistributionTable = hspecTables; - - int numbOfPoints = (hcafTable != null) ? 
hcafTable.length : hspecTables.length; - - highProbabilityCells = new int[numbOfPoints]; - discrepancies = new double[numbOfPoints]; - avgIce = new double[numbOfPoints]; - avgSST = new double[numbOfPoints]; - avgSalinity = new double[numbOfPoints]; - - float statusstep = 80f / (float) numbOfPoints; - - for (int i = 0; i < numbOfPoints; i++) { - if (doHspecAn) - highProbabilityCells[i] = calcHighProbabilityCells(hspecTables[i], threshold); - - if (doHcafAn) { - avgIce[i] = avgValue(hcafTable[i], FIELD.iceconann.name()); - avgSST[i] = avgValue(hcafTable[i], FIELD.sstanmean.name()); - avgSalinity[i] = avgValue(hcafTable[i], FIELD.salinitymean.name()); - AnalysisLogger.getLogger().trace("(" + hcafTable[i] + "): " + " ICE " + avgIce[i] + " SST " + avgSST[i] + " SAL " + avgSalinity[i]); - } - - if (doHspecAn) { - if (i == 0) { - discrepancies[i] = 1.0; - } else { - // OLD CALCULATION discrepancies[i] = MathFunctions.roundDecimal(calcDiscrepancy(configPath, temporaryDirectory, hspecTables[i], hspecTables[i - 1], probabilityColumn, csquareColumn, 0.1f), 5); - discrepancies[i] = MathFunctions.roundDecimal(calcOverDiscrepancy(configPath, temporaryDirectory, hspecTables[i], hspecTables[i - 1], probabilityColumn, csquareColumn, 0.1f), 5); - } - AnalysisLogger.getLogger().trace("(" + hspecTables[i] + "): DISCREPANCY " + discrepancies[i] + " HIGH PROB CELLS " + highProbabilityCells[i]); - } - - // AnalysisLogger.getLogger().trace("(" + hcafTable[i] + "," + hspecTables[i] + "): HIGH PROB CELLS " + highProbabilityCells[i] + " DISCREPANCY " + discrepancies[i] + " ICE " + avgIce[i] + " SST " + avgSST[i] + " SAL " + avgSalinity[i]); - - status = status + statusstep; - } - status = 80f; - produceGraphs(hcafTablesNames, hspecTableNames,threshold); - - } catch (Exception e) { - e.printStackTrace(); - throw e; - } finally { - status = 100f; - referencedbConnection.close(); - } - } - - // init connections - public BioClimateAnalysis(String configPath, String persistencePath, String 
databaseURL, String databaseUserName, String databasePassword, boolean liveRender) throws Exception { - this.configPath = configPath; - this.temporaryDirectory = persistencePath; - if (!configPath.endsWith("/")) - configPath += "/"; - if (!persistencePath.endsWith("/")) - this.temporaryDirectory += "/"; - - AnalysisLogger.setLogger(configPath + AlgorithmConfiguration.defaultLoggerFile); - config = new LexicalEngineConfiguration(); - config.setDatabaseURL(databaseURL); - config.setDatabaseUserName(databaseUserName); - config.setDatabasePassword(databasePassword); - - this.liveRender = liveRender; - } - - public int calcHighProbabilityCells(String hspec, double probabilty) throws Exception { - AnalysisLogger.getLogger().trace("Calculating High Prob Cells"); - List countage = DatabaseFactory.executeSQLQuery(String.format(countHighProbabilityCells, hspec, probabilty), referencedbConnection); - int count = Integer.parseInt("" + countage.get(0)); - AnalysisLogger.getLogger().trace("Calc High Prob Cells: " + count); - return count; - } - - public double avgValue(String hcaf1, String field) throws Exception { - - List countage = DatabaseFactory.executeSQLQuery(String.format(meanVal, hcaf1, field), referencedbConnection); - double count = 0; - if (countage != null && countage.size() > 0) - count = Double.parseDouble("" + countage.get(0)); - - return count; - } - - public int countIceLeakageCells(String hcaf1, String hcaf2) throws Exception { - - List countage = DatabaseFactory.executeSQLQuery(String.format(iceLeakage, hcaf1, hcaf2), referencedbConnection); - int count = Integer.parseInt("" + countage.get(0)); - return count; - } - - public int countSeaCells(String hcaf1, String hcaf2, double threshold) throws Exception { - // System.out.println(String.format(countSeaCells, hcaf1, hcaf2,threshold)); - List countage = DatabaseFactory.executeSQLQuery(String.format(countSeaCells, hcaf1, hcaf2, threshold), referencedbConnection); - int count = Integer.parseInt("" + 
countage.get(0)); - return count; - } - - public float getStatus() { - return status; - } - - public double calcOverDiscrepancy(String configPath, String persistencePath, String firstTable, String secondTable, String probabilityColumnName, String csquareColumn, float comparisonThreshold) throws Exception { - - List nelementsQ1 = DatabaseFactory.executeSQLQuery(DatabaseUtils.countElementsStatement(firstTable), referencedbConnection); - int nelements = Integer.parseInt("" + nelementsQ1.get(0)); - AnalysisLogger.getLogger().trace("Number Of elements1: " + nelementsQ1); - List nelementsQ2 = DatabaseFactory.executeSQLQuery(DatabaseUtils.countElementsStatement(secondTable), referencedbConnection); - int nelements2 = Integer.parseInt("" + nelementsQ2.get(0)); - AnalysisLogger.getLogger().trace("Number Of elements2: " + nelementsQ1); - - List sumFirst = DatabaseFactory.executeSQLQuery(DatabaseUtils.sumElementsStatement(firstTable, probabilityColumnName), referencedbConnection); - double sum1 = Double.parseDouble("" + sumFirst.get(0)); - AnalysisLogger.getLogger().trace("Sum1: " + sum1); - - List sumSecond = DatabaseFactory.executeSQLQuery(DatabaseUtils.sumElementsStatement(secondTable, probabilityColumnName), referencedbConnection); - double sum2 = Double.parseDouble("" + sumSecond.get(0)); - AnalysisLogger.getLogger().trace("Sum2: " + sum1); - - double d = (double) (sum2 - sum1) / (double) (nelements + nelements2); - return d; - } - - public double calcDiscrepancy(String configPath, String persistencePath, String firstTable, String secondTable, String probabilityColumnName, String csquareColumn, float comparisonThreshold) throws Exception { - - AlgorithmConfiguration config = new AlgorithmConfiguration(); - - config.setConfigPath(configPath); - config.setPersistencePath(persistencePath); - config.setNumberOfResources(1); - config.setAgent("DISCREPANCY_ANALYSIS"); - - config.setParam("FirstTable", firstTable); - config.setParam("SecondTable", secondTable); - - 
config.setParam("FirstTableCsquareColumn", csquareColumn); - config.setParam("SecondTableCsquareColumn", csquareColumn); - - config.setParam("FirstTableProbabilityColumn", probabilityColumnName); - config.setParam("SecondTableProbabilityColumn", probabilityColumnName); - - config.setParam("ComparisonThreshold", "" + comparisonThreshold); - - config.setParam("MaxSamples", "" + 30000); - - eval = EvaluatorsFactory.getEvaluators(config).get(0); - HashMap out = eval.process(config); - - Double d = Double.parseDouble(out.get("MEAN")); - return d; - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateGraph.java b/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateGraph.java deleted file mode 100644 index ca5898d..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/BioClimateGraph.java +++ /dev/null @@ -1,196 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.evaluation.bioclimate; - -import java.awt.Color; -import java.awt.Image; - -import org.gcube.contentmanagement.graphtools.abstracts.GenericStandaloneGraph; -import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools; -import org.jfree.chart.ChartFactory; -import org.jfree.chart.JFreeChart; -import org.jfree.chart.axis.CategoryAxis; -import org.jfree.chart.axis.CategoryLabelPositions; -import org.jfree.chart.axis.ValueAxis; -import org.jfree.chart.plot.CategoryPlot; -import org.jfree.chart.plot.PlotOrientation; -import org.jfree.chart.plot.PlotRenderingInfo; -import org.jfree.chart.renderer.category.LineAndShapeRenderer; -import org.jfree.data.category.DefaultCategoryDataset; -import org.jfree.data.general.Dataset; -import org.jfree.ui.RectangleInsets; - -public class BioClimateGraph extends GenericStandaloneGraph { - - private static final long serialVersionUID = 1L; - double max; - double min; - public BioClimateGraph(String title,double max,double min) { - super(title); - this.max= max; - this.min = min; - } - - protected 
Dataset generateDataset() { - // row keys... - String series1 = "First"; - String series2 = "Second"; - String series3 = "Third"; - - // column keys... - String type1 = "Type 1"; - String type2 = "Type 2"; - String type3 = "Type 3"; - String type4 = "Type 4"; - String type5 = "Type 5"; - String type6 = "Type 6"; - String type7 = "Type 7"; - String type8 = "Type 8"; - - // create the dataset... - DefaultCategoryDataset dataset = new DefaultCategoryDataset(); - - dataset.addValue(1.0, series1, type1); - dataset.addValue(4.0, series1, type2); - dataset.addValue(3.0, series1, type3); - dataset.addValue(5.0, series1, type4); - dataset.addValue(5.0, series1, type5); - dataset.addValue(7.0, series1, type6); - dataset.addValue(7.0, series1, type7); - dataset.addValue(8.0, series1, type8); - - dataset.addValue(5.0, series2, type1); - dataset.addValue(7.0, series2, type2); - dataset.addValue(6.0, series2, type3); - dataset.addValue(8.0, series2, type4); - dataset.addValue(4.0, series2, type5); - dataset.addValue(4.0, series2, type6); - dataset.addValue(2.0, series2, type7); - dataset.addValue(1.0, series2, type8); - - dataset.addValue(4.0, series3, type1); - dataset.addValue(3.0, series3, type2); - dataset.addValue(2.0, series3, type3); - dataset.addValue(3.0, series3, type4); - dataset.addValue(6.0, series3, type5); - dataset.addValue(3.0, series3, type6); - dataset.addValue(4.0, series3, type7); - dataset.addValue(3.0, series3, type8); - return dataset; - } - - - public static Image renderStaticImgObject(int width, int height, Dataset set, String title, double max, double min) { - - JFreeChart chart = createStaticChart(set,max,min,title); - - /* - JPanel jp = new ChartPanel(chart); - - this.setContentPane(jp); - this.pack(); - */ -// Image image = this.createImage(width, height); - - Image image = ImageTools.toImage(chart.createBufferedImage(width, height)); - - return image; - } - - - protected static JFreeChart createStaticChart(Dataset dataset, double max, double min, 
String title) { - - // create the chart... - JFreeChart chart = ChartFactory.createLineChart( - title, // chart title - "", // domain axis label - "", // range axis label - (DefaultCategoryDataset)dataset, // data - PlotOrientation.VERTICAL, // orientation - true, // include legend - true, // tooltips - false // urls - ); - chart.setBackgroundPaint(Color.white); - - CategoryPlot plot = chart.getCategoryPlot(); -// plot.setBackgroundPaint(Color.white); - plot.setRangeGridlinePaint(Color.white); - plot.setDomainCrosshairVisible(true); - plot.setDomainGridlinesVisible(true); - plot.setRangeCrosshairVisible(true); - plot.setRenderer(new LineAndShapeRenderer(true,true)); - - CategoryAxis categoryaxis1 = plot.getDomainAxis(0); - categoryaxis1.setCategoryLabelPositions(CategoryLabelPositions.DOWN_45); - - plot.mapDatasetToDomainAxis(0, 0); - - if (max!=min){ - plot.getRangeAxis().setAutoRange(false); - plot.getRangeAxis().setUpperBound(max); - plot.getRangeAxis().setLowerBound(min); - double avg = min+((max-min)/2d); - plot.getRangeAxis().centerRange(avg); - } - - return chart; - } - - protected JFreeChart createChart(Dataset dataset) { - - // create the chart... 
- JFreeChart chart = ChartFactory.createLineChart( - this.getTitle(), // chart title - "", // domain axis label - "", // range axis label - (DefaultCategoryDataset)dataset, // data - PlotOrientation.VERTICAL, // orientation - true, // include legend - true, // tooltips - false // urls - ); - chart.setBackgroundPaint(Color.white); - - CategoryPlot plot = chart.getCategoryPlot(); -// plot.setBackgroundPaint(Color.white); - plot.setRangeGridlinePaint(Color.white); - plot.setDomainCrosshairVisible(true); - plot.setDomainGridlinesVisible(true); - plot.setRangeCrosshairVisible(true); - plot.setRenderer(new LineAndShapeRenderer(true,true)); - - CategoryAxis categoryaxis1 = plot.getDomainAxis(0); - categoryaxis1.setCategoryLabelPositions(CategoryLabelPositions.DOWN_45); - - plot.mapDatasetToDomainAxis(0, 0); - - -// plot.zoomRangeAxes(0.1,10d,null,null); -// - if (max!=min){ - plot.getRangeAxis().setAutoRange(false); - plot.getRangeAxis().setUpperBound(max); - plot.getRangeAxis().setLowerBound(min); - double avg = min+((max-min)/2d); - plot.getRangeAxis().centerRange(avg); - } - //deprecated - /* - LineAndShapeRenderer renderer = (LineAndShapeRenderer) plot.getRenderer(); - renderer.setShapesVisible(true); - renderer.setDrawOutlines(true); - renderer.setUseFillPaint(true); - renderer.setFillPaint(Color.white); -*/ - - - - - return chart; - } - - @Override - protected GenericStandaloneGraph getInstance(String title) { - return new BioClimateGraph(title,max,min); - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/ExperimentForArticle.java b/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/ExperimentForArticle.java deleted file mode 100644 index f0ebf2f..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/ExperimentForArticle.java +++ /dev/null @@ -1,59 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.evaluation.bioclimate; - -import org.gcube.dataanalysis.ecoengine.user.GeneratorT; -import 
org.gcube.dataanalysis.ecoengine.user.ModelerT; - -public class ExperimentForArticle { - - public static void main(String[] args) throws Exception { - String configPath = "./cfg/"; - - final String[] csquareTables = { - "hcaf_d_2016_linear_01332632269756", - "hcaf_d_2020_linear_11332632270082", - "hcaf_d_2024_linear_21332632270343", - "hcaf_d_2028_linear_31332632270608", - "hcaf_d_2032_linear_41332632270847", - "hcaf_d_2036_linear_51332632271080", - "hcaf_d_2040_linear_61332632271334", - "hcaf_d_2044_linear_71332632271560", - "hcaf_d_2050" - }; - final String [] envelopeOutputTable = {"hspen_2016","hspen_2020","hspen_2024","hspen_2028","hspen_2032","hspen_2036","hspen_2040","hspen_2044","hspen_2050"}; - - final String [] finalDistributionTable = { "hspec_2016", "hspec_2020", "hspec_2024", "hspec_2028", "hspec_2032", "hspec_2036", "hspec_2040", "hspec_2044", "hspec_2050"}; - - /* - String [] csquareTable = {"hcaf_d","hcaf_d_2050"}; - String [] envelopeOutputTable = {"hspen_micro_0","hspen_micro_2050"}; - String [] finalDistributionTable = {"hspec_0","hspec_2050"}; - */ - - String preprocessedTable = "maxminlat_hspen"; - String envelopeTable = "hspen"; - String occurrenceCells = "occurrenceCells"; - - String commonkeycolumn = "csquarecode"; - String probabilitycolumn = "probability"; - - int numberOfResources = 4; - - String speciesCode = "Fis-10199"; - String userName = "gianpaolo.coro"; - String modelName = "HSPEN"; - String generatorName = "AQUAMAPS_SUITABLE"; - String qualityOperationName = "QUALITY_ANALYSIS"; - String discrepancyOperationName = "DISCREPANCY_ANALYSIS"; - String finalDistributionKeyColumn = "csquarecode"; - - for (int i =0;i "+ envelopeOutputTable[i]); - ModelerT.train(ModelerT.getTrainingConfigHSPEN(modelName, envelopeOutputTable[i], occurrenceCells,envelopeTable,csquareTables[i],configPath)); - // Generate -// GeneratorT.generate(GeneratorT.getGenerationConfig(numberOfResources, generatorName, envelopeOutputTable[i], preprocessedTable, 
speciesCode, userName, csquareTables[i], finalDistributionTable[i], configPath)); - } - System.out.println("COMPUTATION FINISHED!"); - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/ExperimentHSPECForArticle.java b/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/ExperimentHSPECForArticle.java deleted file mode 100644 index f91ab54..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/ExperimentHSPECForArticle.java +++ /dev/null @@ -1,82 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.evaluation.bioclimate; - -import org.gcube.dataanalysis.ecoengine.user.GeneratorT; -import org.gcube.dataanalysis.ecoengine.user.ModelerT; - -public class ExperimentHSPECForArticle { - - public static void main(String[] args) throws Exception { - String configPath = "./cfg/"; - - final String[] csquareTables = { - "hcaf_d", - "hcaf_d_2016_linear_01332632269756", - "hcaf_d_2020_linear_11332632270082", - "hcaf_d_2024_linear_21332632270343", - "hcaf_d_2028_linear_31332632270608", - "hcaf_d_2032_linear_41332632270847", - "hcaf_d_2036_linear_51332632271080", - "hcaf_d_2040_linear_61332632271334", - "hcaf_d_2044_linear_71332632271560", - "hcaf_d_2050" - }; - - final String [] envelopeOutputTable = { - "hspen", - "hspen_2016", - "hspen_2020", - "hspen_2024", - "hspen_2028", - "hspen_2032", - "hspen_2036", - "hspen_2040", - "hspen_2044", - "hspen_2050" - }; - - final String [] finalDistributionTable = { - "hspec_2012", - "hspec_2016", - "hspec_2020", - "hspec_2024", - "hspec_2028", - "hspec_2032", - "hspec_2036", - "hspec_2040", - "hspec_2044", - "hspec_2050" - }; - - /* - String [] csquareTable = {"hcaf_d","hcaf_d_2050"}; - String [] envelopeOutputTable = {"hspen_micro_0","hspen_micro_2050"}; - String [] finalDistributionTable = {"hspec_0","hspec_2050"}; - */ - - String preprocessedTable = "maxminlat_hspen"; - String envelopeTable = "hspen"; - String occurrenceCells = "occurrenceCells"; - - String commonkeycolumn = 
"csquarecode"; - String probabilitycolumn = "probability"; - - int numberOfResources = 4; - - String speciesCode = "Fis-10199"; - String userName = "gianpaolo.coro"; - String modelName = "HSPEN"; - String generatorName = "AQUAMAPS_SUITABLE"; - String qualityOperationName = "QUALITY_ANALYSIS"; - String discrepancyOperationName = "DISCREPANCY_ANALYSIS"; - String finalDistributionKeyColumn = "csquarecode"; - - for (int i =0;i "+finalDistributionTable[i]); - // Generate - GeneratorT.generate(GeneratorT.getGenerationConfig(numberOfResources, generatorName, envelopeOutputTable[i], preprocessedTable, speciesCode, userName, csquareTables[i], finalDistributionTable[i], configPath)); - } - System.out.println("COMPUTATION FINISHED!"); - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/InterpolateTables.java b/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/InterpolateTables.java deleted file mode 100644 index f3e9226..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/InterpolateTables.java +++ /dev/null @@ -1,280 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.evaluation.bioclimate; - -import java.io.File; -import java.math.BigDecimal; -import java.util.ArrayList; -import java.util.List; - -import org.gcube.contentmanagement.graphtools.utils.MathFunctions; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.DataTypeRecognizer; -import org.gcube.contentmanagement.lexicalmatcher.analysis.core.LexicalEngineConfiguration; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; -import org.gcube.dataanalysis.ecoengine.utils.Operations; -import org.hibernate.SessionFactory; - -/** - * checks if two tables are equal 
checks numbers at the second decimal position - */ -public class InterpolateTables { - - // connection setup - - protected String temporaryDirectory; - // selection query - public static String selectElementsQuery = "select %1$s from %2$s order by %3$s"; - public static String selectDestElementsQuery = "select %1$s from %2$s where %3$s"; - public float status; - private String[] interpolatedTables; - private LexicalEngineConfiguration config; - // database connections - protected SessionFactory referencedbConnection; - private String configPath; - private File[] producedfiles; - - public static enum INTERPOLATIONFUNCTIONS { - LINEAR, PARABOLIC - }; - - public String[] getInterpolatedTables() { - return interpolatedTables; - } - - // init connections - public InterpolateTables(String configPath, String persistencePath, String databaseURL, String databaseUserName, String databasePassword) throws Exception { - this.configPath = configPath; - this.temporaryDirectory = persistencePath; - if (!configPath.endsWith("/")) - configPath += "/"; - if (!persistencePath.endsWith("/")) - this.temporaryDirectory += "/"; - - AnalysisLogger.setLogger(configPath + AlgorithmConfiguration.defaultLoggerFile); - - AnalysisLogger.getLogger().debug("Initialization complete: persistence path " + persistencePath); - - config = new LexicalEngineConfiguration(); - config.setDatabaseURL(databaseURL); - config.setDatabaseUserName(databaseUserName); - config.setDatabasePassword(databasePassword); - } - - // tables have to present the same structure - public void interpolate(String table1, String table2, int intervals, INTERPOLATIONFUNCTIONS function, int startYear, int endYear) throws Exception { - - try { - if (intervals == 1) { - interpolatedTables = new String[2]; - interpolatedTables[0] = table1; - interpolatedTables[1] = table2; - AnalysisLogger.getLogger().debug("NO TABLES TO PRODUCE"); - } else { - referencedbConnection = DatabaseFactory.initDBConnection(configPath + 
AlgorithmConfiguration.defaultConnectionFile, config); - AnalysisLogger.getLogger().debug("ReferenceDB initialized"); - status = 0f; - AnalysisLogger.getLogger().debug("Interpolating from " + table1 + " to " + table2); - DatabaseUtils utils = new DatabaseUtils(referencedbConnection); - // analyze table and take information about it - String createTableStatement = utils.buildCreateStatement(table1, "%1$s"); - AnalysisLogger.getLogger().debug("Create Statement for table " + table1 + ": " + createTableStatement); - int numberOfColumns = utils.getColumnDecriptions().size(); - // initialize the map of columns to write - List> outputFiles = new ArrayList>(); - for (int g = 0; g < intervals - 2; g++) { - outputFiles.add(new ArrayList()); - } - float statusstep = 60f / (float) numberOfColumns; - // DatabaseFactory.executeSQLUpdate(creationStatement, referencedbConnection); - // take the columns - for (int j = 0; j < numberOfColumns; j++) { - // take column name - String gotColumn = utils.getColumnName(j); - String gotColumnType = utils.getColumnType(j); - String javatype = DataTypeRecognizer.transformTypeFromDB(gotColumnType); - List takeFirstColumn = DatabaseFactory.executeSQLQuery(DatabaseUtils.getOrderedElements(table1, utils.getPrimaryKey(), gotColumn), referencedbConnection); - List takeSecondColumn = DatabaseFactory.executeSQLQuery(DatabaseUtils.getOrderedElements(table2, utils.getPrimaryKey(), gotColumn), referencedbConnection); - - // only if data are of numeric type, perform calculation - if (javatype.equals(BigDecimal.class.getName())) { - AnalysisLogger.getLogger().debug("interpolating -> " + gotColumn); - - List> interpolations = interpolateColumns(takeFirstColumn, takeSecondColumn, intervals, gotColumnType, function); - - for (int i = 1; i < intervals - 1; i++) { - // create the interpolation table - String tableInterp = table1 + "_" + (i); - // for each column to substitute - List columnToSub = interpolations.get(i); - if (columnToSub.size() > 0) { - 
AnalysisLogger.getLogger().debug("UPDATE TABLE " + tableInterp + " ON COLUMN " + gotColumn); - addColumnToTable(outputFiles.get(i - 1), columnToSub, true); - } else { - AnalysisLogger.getLogger().debug("DOESN'T CHANGE TABLE " + tableInterp + " COLUMN " + gotColumn); - addColumnToTable(outputFiles.get(i - 1), takeFirstColumn, true); - } - } - } - // else update all the tables - else { - for (int i = 0; i < intervals - 2; i++) { - addColumnToTable(outputFiles.get(i), takeFirstColumn, false); - } - } - - status = status + statusstep; - - } - status = 60f; - AnalysisLogger.getLogger().debug("WRITING ALL THE BUFFERS"); - writeAllStringBuffersToFiles(table1, outputFiles, function, startYear, endYear); - - statusstep = 40f / (float) producedfiles.length; - - interpolatedTables = new String[producedfiles.length + 2]; - interpolatedTables[0] = table1; - - for (int i = 0; i < producedfiles.length; i++) { - String filename = producedfiles[i].getName(); - filename = filename.substring(0, filename.lastIndexOf(".")); - interpolatedTables[i + 1] = filename; - String copyFileQuery = DatabaseUtils.copyFileToTableStatement(temporaryDirectory + producedfiles[i].getName(), filename); - // create Table - AnalysisLogger.getLogger().debug("CREATING TABLE->" + filename); - DatabaseFactory.executeSQLUpdate(String.format(createTableStatement, filename), referencedbConnection); - AnalysisLogger.getLogger().debug("FULFILLING TABLE->" + filename + ": " + copyFileQuery); - DatabaseFactory.executeSQLUpdate(copyFileQuery, referencedbConnection); - status = Math.min(status + statusstep, 99); - } - - AnalysisLogger.getLogger().debug("DELETING ALL TEMPORARY FILES"); - for (int i = 0; i < producedfiles.length; i++) { - producedfiles[i].delete(); - } - - interpolatedTables[interpolatedTables.length - 1] = table2; - - AnalysisLogger.getLogger().debug("ALL TABLES HAVE BEEN PRODUCED"); - }//end else control on the number of intervals - } catch (Exception e) { - e.printStackTrace(); - throw e; - } finally 
{ - // close connections - if (referencedbConnection!=null) - referencedbConnection.close(); - status = 100f; - } - } - - public float getStatus() { - return status; - } - - private void addColumnToTable(List rows, List elements, boolean isNumber) { - int size = elements.size(); - for (int i = 0; i < size; i++) { - Object[] couple = (Object[]) elements.get(i); - String value = "" + couple[1]; - StringBuffer buffer = null; - if (i >= rows.size()) { - buffer = new StringBuffer(); - if (isNumber && (value == null) || (value.length() == 0)) - buffer.append("0"); - else - buffer.append(value); - rows.add(buffer); - } else { - buffer = rows.get(i); - buffer.append(";" + value); - } - - } - } - - private void writeAllStringBuffersToFiles(String initialFile, List> outputFiles, INTERPOLATIONFUNCTIONS function, int startYear, int endYear) throws Exception { - int numOfFiles = outputFiles.size(); - int yearStep = (int) ((float) (endYear - startYear) / (float) (numOfFiles + 1)); - producedfiles = new File[numOfFiles]; - for (int i = 0; i < numOfFiles; i++) { - List rows = outputFiles.get(i); - StringBuffer completeFile = new StringBuffer(); - int nrows = rows.size(); - for (int k = 0; k < nrows; k++) { - completeFile.append(rows.get(k) + "\n"); - } - int yearCals = startYear + (i + 1) * yearStep; - if (yearCals == endYear) - yearCals = endYear - 1; - - String filename = temporaryDirectory + initialFile + "_" + (yearCals) + "_" + function.name() + "_" + i + System.currentTimeMillis() + ".csv"; - FileTools.saveString(filename, completeFile.toString(), true, "UTF-8"); - producedfiles[i] = new File(filename); - } - } - - // interpolates parallel columns - private List> interpolateColumns(List col1, List col2, int intervals, String type, INTERPOLATIONFUNCTIONS function) { - int elements = col1.size(); - ArrayList> columns = new ArrayList>(); - for (int i = 0; i < intervals; i++) { - columns.add(new ArrayList()); - } - // produce a column couple for each interval - boolean interping 
= true; - for (int i = 0; i < elements; i++) { - Object[] row1 = (Object[]) col1.get(i); - Object[] row2 = (Object[]) col2.get(i); - double firstNum = row1[1] != null ? Double.parseDouble("" + row1[1]) : 0d; - double secondNum = row2[1] != null ? Double.parseDouble("" + row2[1]) : 0d; - Object key = row1[0]; - double[] interpolation = null; - if (firstNum != secondNum) { - if (interping) { - AnalysisLogger.getLogger().debug("Interpolating ... "); - interping = false; - } - - if (function == INTERPOLATIONFUNCTIONS.LINEAR) - interpolation = Operations.linearInterpolation(firstNum, secondNum, intervals); - else if (function == INTERPOLATIONFUNCTIONS.PARABOLIC) - interpolation = Operations.parabolicInterpolation(firstNum, secondNum, intervals); - } - - for (int j = 0; j < intervals; j++) { - Object[] couple = new Object[2]; - couple[0] = key; - double interp = firstNum; - if (interpolation != null) - interp = interpolation[j]; - - if (type.equals("integer")) - couple[1] = Math.round(interp); - else { - interp = MathFunctions.roundDecimal(interp, 2); - couple[1] = interp; - } - columns.get(j).add(couple); - } - - } - - return columns; - } - - public static void main(String[] args) throws Exception { - - String configPath = "./cfg/"; - String persistencePath = "c:/tmp/"; - String databaseUrl = "jdbc:postgresql://localhost/testdb"; - String databaseUser = "gcube"; - String databasePassword = "d4science2"; - InterpolateTables interp = new InterpolateTables(configPath, persistencePath, databaseUrl, databaseUser, databasePassword); - - interp.interpolate("hcaf_d", "hcaf_d_2050", 10, INTERPOLATIONFUNCTIONS.LINEAR, 2012, 2050); - - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/ProduceTestMap.java b/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/ProduceTestMap.java deleted file mode 100644 index 5d0bb55..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/evaluation/bioclimate/ProduceTestMap.java +++ /dev/null @@ -1,46 +0,0 @@ 
-package org.gcube.dataanalysis.ecoengine.evaluation.bioclimate; - -import org.gcube.dataanalysis.ecoengine.user.GeneratorT; -import org.gcube.dataanalysis.ecoengine.user.ModelerT; - -public class ProduceTestMap { - - public static void main(String[] args) throws Exception { - String configPath = "./cfg/"; - - final String[] csquareTables = { "hcaf_d_2016_PARABOLIC", "hcaf_d_2020_PARABOLIC", "hcaf_d_2024_PARABOLIC", "hcaf_d_2028_PARABOLIC", "hcaf_d_2032_PARABOLIC", "hcaf_d_2036_PARABOLIC", "hcaf_d_2040_PARABOLIC", "hcaf_d_2044_PARABOLIC"}; - final String [] envelopeOutputTable = {"hspen_validation_1","hspen_validation_2","hspen_validation_3","hspen_validation_4","hspen_validation_5","hspen_validation_6","hspen_validation_7","hspen_validation_8"}; - final String [] finalDistributionTable = { "hspec_v_0", "hspec_v_1", "hspec_v_2", "hspec_v_3", "hspec_v_4", "hspec_v_5", "hspec_v_6", "hspec_v_7", "hspec_v_8"}; - - /* - String [] csquareTable = {"hcaf_d","hcaf_d_2050"}; - String [] envelopeOutputTable = {"hspen_micro_0","hspen_micro_2050"}; - String [] finalDistributionTable = {"hspec_0","hspec_2050"}; - */ - - String preprocessedTable = "maxminlat_hspen"; - String envelopeTable = "hspen_validation"; - String occurrenceCells = "occurrenceCells"; - - String commonkeycolumn = "csquarecode"; - String probabilitycolumn = "probability"; - - int numberOfResources = 4; - - String speciesCode = "Fis-10199"; - String userName = "gianpaolo.coro"; - String modelName = "HSPEN"; - String generatorName = "AQUAMAPS_SUITABLE"; - String qualityOperationName = "QUALITY_ANALYSIS"; - String discrepancyOperationName = "DISCREPANCY_ANALYSIS"; - String finalDistributionKeyColumn = "csquarecode"; - - for (int i =0;i getInputParameters(); - - public String getResourceLoad(); - - public String getResources(); - - public float getStatus(); - - //gets the weight of the generator: according to this the generator will be placed in the execution order - public INFRASTRUCTURE getInfrastructure(); - - 
// gets the type of the content inside the generator: String, File, HashMap. - public VARTYPE getContentType(); - - // gets the content of the model: e.g. Table indications etc. - public Object getContent(); - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/interfaces/DataAnalysis.java b/src/org/gcube/dataanalysis/ecoengine/interfaces/DataAnalysis.java deleted file mode 100644 index 4d729d9..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/interfaces/DataAnalysis.java +++ /dev/null @@ -1,124 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.interfaces; - -import java.util.HashMap; -import java.util.List; - -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; -import org.gcube.dataanalysis.ecoengine.utils.ResourceFactory; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; - -/** - * Implements a mono-thread data analysis process - * Status is managed outside the class and analysis is simply conducted by initializing and applying procedure - * @author coro - * - */ -public abstract class DataAnalysis implements Evaluator{ - - protected ResourceFactory resourceManager; - protected int processedRecords; - protected float status; - - /** - * establishes imput parameters for this algorithm along with their type - */ - public abstract HashMap getInputParameters(); - - /** - * lists the output parameters names - * @return - */ - public abstract List getOutputParameters(); - - /** - * Executed the core of the algorithm - * @param config - * @return - * @throws Exception - */ - public abstract HashMap analyze(AlgorithmConfiguration config) throws Exception; - - /** - * initializes the procedure e.g. connects to the database - * @param config - * @throws Exception - */ - public abstract void init(AlgorithmConfiguration config) throws Exception; - - /** - * ends the processing, e.g. 
closes connections - * @throws Exception - */ - public abstract void end(); - - /** - * Processing skeleton : init-analyze-end - * @param config - * @return - * @throws Exception - */ - public HashMap process(AlgorithmConfiguration config) throws Exception{ - status = 0; - HashMap out = new HashMap(); - try{ - init(config); - out = analyze(config); - end(); - }catch(Exception e){ - e.printStackTrace(); - throw e; - } - finally{ - status = 100; - } - return out; - } - - /** - * calculates the number of processed records per unity of time: the timing is calculated internally by the resourceManager and used when the method is interrogated - */ - @Override - public String getResourceLoad() { - if (resourceManager==null) - resourceManager = new ResourceFactory(); - return resourceManager.getResourceLoad(processedRecords); - } - - /** - * gets the occupancy of the resource: in this case one thread - */ - @Override - public String getResources() { - return ResourceFactory.getResources(100f); - } - - /** - * The weight of this procedure is the lowest as it runs on local machine - */ - @Override - public INFRASTRUCTURE getInfrastructure() { - return INFRASTRUCTURE.LOCAL; - } - - /** - * gets the internal status of the operation - */ - @Override - public float getStatus() { - return status; - } - - /** - * visualizes the results of the analysis - * @param results - */ - public static void visualizeResults(HashMap results){ - - for (String key:results.keySet()){ - System.out.println(key+":"+results.get(key)); - } - } - - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/interfaces/Evaluator.java b/src/org/gcube/dataanalysis/ecoengine/interfaces/Evaluator.java deleted file mode 100644 index c490165..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/interfaces/Evaluator.java +++ /dev/null @@ -1,17 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.interfaces; - -import java.util.HashMap; - -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; - - 
-public interface Evaluator extends ComputationalAgent{ - - - public HashMap process(AlgorithmConfiguration config) throws Exception; - - public abstract void init(AlgorithmConfiguration config) throws Exception; - - public abstract void end(); - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/interfaces/Generator.java b/src/org/gcube/dataanalysis/ecoengine/interfaces/Generator.java deleted file mode 100644 index 9eea11e..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/interfaces/Generator.java +++ /dev/null @@ -1,23 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.interfaces; - -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; - -public interface Generator extends ComputationalAgent{ - - public ALG_PROPS[] getSupportedAlgorithms(); - - public INFRASTRUCTURE getInfrastructure(); - - public void init() throws Exception; - - public void setConfiguration(AlgorithmConfiguration config); - - public void shutdown(); - - public String getLoad(); - - public void generate() throws Exception; - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/interfaces/Model.java b/src/org/gcube/dataanalysis/ecoengine/interfaces/Model.java deleted file mode 100644 index 9fc6f04..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/interfaces/Model.java +++ /dev/null @@ -1,50 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.interfaces; - -import java.util.HashMap; - -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; - -public interface Model { - - - //defines the properties of this algorithm - public ALG_PROPS[] getProperties(); - - //defines the name of this model - public String getName(); - - //gets the 
description of the model - public String getDescription(); - - //set the input parameters for this generator - public HashMap getInputParameters(); - - public float getVersion(); - - public void setVersion(float version); - - public void init(AlgorithmConfiguration Input, Model previousModel); - - public String getResourceLoad(); - - public String getResources(); - - public float getStatus(); - - public String getInputType(); - - public String getOutputType(); - - public void postprocess(AlgorithmConfiguration Input, Model previousModel); - - public void train(AlgorithmConfiguration Input, Model previousModel); - - public void stop(); - - public VARTYPE getContentType(); - - public Object getContent(); -} diff --git a/src/org/gcube/dataanalysis/ecoengine/interfaces/Modeler.java b/src/org/gcube/dataanalysis/ecoengine/interfaces/Modeler.java deleted file mode 100644 index 413af44..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/interfaces/Modeler.java +++ /dev/null @@ -1,24 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.interfaces; - -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; - -public interface Modeler extends ComputationalAgent{ - - public ALG_PROPS[] getSupportedModels(); - - //gets the weight of the generator: according to this the generator will be placed in the execution order - public INFRASTRUCTURE getInfrastructure(); - - public void setmodel(Model model); - - public void model(AlgorithmConfiguration Input, Model previousModel); - - public void model(AlgorithmConfiguration Input); - - public void stop(); - - public Model getModel(); - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistribution.java b/src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistribution.java deleted file mode 100644 index 05d3eb8..0000000 --- 
a/src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistribution.java +++ /dev/null @@ -1,27 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.interfaces; - -import java.util.HashMap; - -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; - -public interface SpatialProbabilityDistribution { - - //defines the properties of this algorithm - public ALG_PROPS[] getProperties(); - - //defines the name of this algorithm - public String getName(); - - //gets the description of the algorithm - public String getDescription(); - - //set the input parameters for this generator - public HashMap getInputParameters(); - - public VARTYPE getContentType(); - - public Object getContent(); - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionGeneric.java b/src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionGeneric.java deleted file mode 100644 index 7b1d26b..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionGeneric.java +++ /dev/null @@ -1,49 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.interfaces; - -import java.util.List; -import java.util.Map; - -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; - -//implements a SpatialProbabilityDistribution where data are taken from a Database -public interface SpatialProbabilityDistributionGeneric extends SpatialProbabilityDistribution{ - - //initialization of the distribution model - public void init(AlgorithmConfiguration config); - - public String getMainInfoType(); - - public String getGeographicalInfoType(); - - public List getMainInfoObjects(); - - //get the way geographical information will be taken - public List getGeographicalInfoObjects(); - - //calculate a single step of probability - public float calcProb(Object mainInfo,Object area); - - 
//preprocessing before calculating a single probability value - public void singleStepPreprocess(Object mainInfo,Object area); - - //postprocessing after calculating a single probability value - public void singleStepPostprocess(Object mainInfo,Object allAreasInformation); - - //preprocessing after the whole calculation - public void postProcess(); - - //store the result of the probability distribution model: e.g. for the input species -> csquare , probability - public void storeDistribution(Map> distribution); - - //get the internal processing status for the single step calculation - public float getInternalStatus(); - - //get a unique identifier for the object representing the main information , e.g. speciesID representing the first element to be put in the species probability insert - public String getMainInfoID(Object mainInfo); - - //get a unique identifier for the geographical information: e.g. csquarecode representing the second element to be put in the species probability insert - public String getGeographicalID(Object geoInfo); - - - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionNode.java b/src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionNode.java deleted file mode 100644 index 5667bd3..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionNode.java +++ /dev/null @@ -1,27 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.interfaces; - -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; - -public interface SpatialProbabilityDistributionNode extends SpatialProbabilityDistribution { - - //initialization of the distribution model - public void initSingleNode(AlgorithmConfiguration config); - - //preprocessing after the whole calculation - public void postProcess(); - - //get the internal processing status for the single step calculation - public float getInternalStatus(); - - public int executeNode(int cellOrdinal, int 
chunksize, int speciesOrdinal, int speciesChunkSize, String pathToFiles, String logfile); - - public void setup(AlgorithmConfiguration config) throws Exception; - - public int getNumberOfSpecies(); - - public int getNumberOfGeoInfo(); - - public int getOverallProcessedInfo(); - - public void stop(); -} diff --git a/src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionTable.java b/src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionTable.java deleted file mode 100644 index f338dbb..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/interfaces/SpatialProbabilityDistributionTable.java +++ /dev/null @@ -1,62 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.interfaces; - -import java.util.Queue; - -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.hibernate.SessionFactory; - -//implements a SpatialProbabilityDistribution where data are taken from a Database -public interface SpatialProbabilityDistributionTable extends SpatialProbabilityDistribution{ - - //define the properties of this algorithm - public ALG_PROPS[] getProperties(); - - //initialization of the distribution model - public void init(AlgorithmConfiguration config,SessionFactory dbHibConnection); - - //get the way principal info will be queried - public String getMainInfoQuery(); - - //get the way geographical information will be taken - public String getGeographicalInfoQuery(); - - //get the structure of the table which will contain the prob distribution - public String getDistributionTableStatement(); - - //calculate a single step of probability - public float calcProb(Object mainInfo,Object area); - - //get additonal metadata list to be put in the final table - public String getAdditionalMetaInformation(); - - //get the additional content to be put in the final table according to the Metadata - public String getAdditionalInformation(Object 
mainInfo,Object area); - - //preprocessing before calculating a single probability value - public void singleStepPreprocess(Object mainInfo,Object area); - - //postprocessing after calculating a single probability value - public void singleStepPostprocess(Object mainInfo,Object allAreasInformation); - - //preprocessing after the whole calculation - public void postProcess(); - - //get the internal processing status for the single step calculation - public float getInternalStatus(); - - //get a unique identifier for the object representing the main information , e.g. speciesID representing the first element to be put in the species probability insert - public String getMainInfoID(Object mainInfo); - - //get a unique identifier for the geographical information: e.g. csquarecode representing the second element to be put in the species probability insert - public String getGeographicalID(Object geoInfo); - - //apply a filter to a single table row representing a probability point - public String filterProbabiltyRow(String probabiltyRow); - - //apply a bulk filter when a synchronous write is enabled - public Queue filterProbabilitySet(Queue probabiltyRows); - - //indicate if the write of the probability rows will be during the overall computation for a single mainInformation object or after the whole processing - public boolean isSynchronousProbabilityWrite(); -} diff --git a/src/org/gcube/dataanalysis/ecoengine/modeling/SimpleModeler.java b/src/org/gcube/dataanalysis/ecoengine/modeling/SimpleModeler.java deleted file mode 100644 index 709c887..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/modeling/SimpleModeler.java +++ /dev/null @@ -1,84 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.modeling; - -import java.util.HashMap; - -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.configuration.INFRASTRUCTURE; -import 
org.gcube.dataanalysis.ecoengine.interfaces.Model; -import org.gcube.dataanalysis.ecoengine.interfaces.Modeler; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; - -public class SimpleModeler implements Modeler{ - private Model innermodel; - - @Override - public void model(AlgorithmConfiguration Input, Model previousModel) { - innermodel.init(Input, previousModel); - innermodel.train(Input, previousModel); - innermodel.postprocess(Input, previousModel); - } - - @Override - public void model(AlgorithmConfiguration Input) { - innermodel.init(Input, null); - innermodel.train(Input, null); - innermodel.postprocess(Input, null); - } - - @Override - public String getResourceLoad() { - return innermodel.getResourceLoad(); - } - - @Override - public String getResources() { - return innermodel.getResources(); - } - - @Override - public Model getModel() { - return innermodel; - } - - @Override - public void setmodel(Model model) { - innermodel = model; - } - - @Override - public float getStatus() { - return innermodel.getStatus(); - } - - @Override - public void stop() { - innermodel.stop(); - } - - @Override - public ALG_PROPS[] getSupportedModels() { - ALG_PROPS[] props = {ALG_PROPS.SPECIES_ENVELOPES,ALG_PROPS.SPECIES_MODEL}; - return props; - } - - @Override - public HashMap getInputParameters() { - return innermodel.getInputParameters(); - } - - @Override - public INFRASTRUCTURE getInfrastructure() { - return INFRASTRUCTURE.LOCAL; - } - - public VARTYPE getContentType() { - return innermodel.getContentType(); - } - - public Object getContent() { - return innermodel.getContent(); - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNN.java b/src/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNN.java deleted file mode 100644 index 26d2893..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNN.java +++ /dev/null @@ -1,220 +0,0 @@ -package 
org.gcube.dataanalysis.ecoengine.models; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.ObjectOutputStream; -import java.util.HashMap; -import java.util.List; - -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.interfaces.Model; -import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network; -import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; -import org.hibernate.SessionFactory; - -public class ModelAquamapsNN implements Model { - - @Override - public ALG_PROPS[] getProperties() { - ALG_PROPS[] props = { ALG_PROPS.SPECIES_MODEL }; - return props; - } - - @Override - public String getName() { - return "AQUAMAPSNN"; - } - - @Override - public String getDescription() { - return "Aquamaps Trained using Neural Networks"; - } - - @Override - public HashMap getInputParameters() { - HashMap parameters = new HashMap(); - parameters.put("AbsenceDataTable", new VarCouple(VARTYPE.STRING, "absence_data")); - parameters.put("PresenceDataTable", new VarCouple(VARTYPE.STRING, "presence_data")); - parameters.put("SpeciesName", new VarCouple(VARTYPE.STRING, "")); - parameters.put("LayersNeurons", new VarCouple(VARTYPE.STRING, "100,2")); - - parameters.put("UserName", new VarCouple(VARTYPE.SERVICE, "")); - parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, "")); - parameters.put("DatabasePassword", new VarCouple(VARTYPE.DATABASEPASSWORD, "")); - parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, "")); - parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, "")); - return parameters; - } - - @Override - public float 
getVersion() { - return 0; - } - - @Override - public void setVersion(float version) { - } - - SessionFactory connection; - String fileName; - String presenceTable; - String absenceTable; - float status; - int[] layersNeurons = {100, 2}; - - @Override - public void init(AlgorithmConfiguration Input, Model previousModel) { - AnalysisLogger.setLogger(Input.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile); - - // init the database - String defaultDatabaseFile = Input.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile; - - Input.setDatabaseDriver(Input.getParam("DatabaseDriver")); - Input.setDatabaseUserName(Input.getParam("DatabaseUserName")); - Input.setDatabasePassword(Input.getParam("DatabasePassword")); - Input.setDatabaseURL(Input.getParam("DatabaseURL")); - - try { - connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, Input); - } catch (Exception e) { - e.printStackTrace(); - AnalysisLogger.getLogger().trace("ERROR initializing connection"); - } - - fileName = Input.getPersistencePath() + "neuralnetwork_" + Input.getParam("SpeciesName") + "_" + Input.getParam("UserName"); - presenceTable = Input.getParam("PresenceDataTable"); - absenceTable = Input.getParam("AbsenceDataTable"); - - String layersNeurons$ = Input.getParam("LayersNeurons"); - if ((layersNeurons$!=null)&&(layersNeurons$.length()>0)) - { - String [] split = layersNeurons$.split(","); - layersNeurons = new int[split.length]; - for (int i = 0;i presences = DatabaseFactory.executeSQLQuery(String.format(takeElementsQuery, presenceTable), connection); - // take all absence inputs -// AnalysisLogger.getLogger().trace("presence "+String.format(takeElementsQuery, presenceTable)); -// AnalysisLogger.getLogger().trace("absence "+String.format(takeElementsQuery, absenceTable)); - List absences = DatabaseFactory.executeSQLQuery(String.format(takeElementsQuery, absenceTable), connection); - int numbOfPresence = presences.size(); - int numbOfAbsence = absences.size(); - - // 
setup Neural Network - int numberOfInputNodes = 11; - int numberOfOutputNodes = 1; - int[] innerLayers = Neural_Network.setupInnerLayers(layersNeurons); - Neural_Network nn = new Neural_Network(numberOfInputNodes, numberOfOutputNodes, innerLayers, Neural_Network.ACTIVATIONFUNCTION.SIGMOID); - - - int numberOfInputs = numbOfPresence + numbOfAbsence; - double[][] in = new double[numberOfInputs][]; - double[][] out = new double[numberOfInputs][]; - // build NN input - for (int i = 0; i < numbOfPresence; i++) { - in[i] = Neural_Network.preprocessObjects((Object[]) presences.get(i)); - out[i] = nn.getPositiveCase(); - } - for (int i = numbOfPresence; i < numberOfInputs; i++) { - in[i] = Neural_Network.preprocessObjects((Object[]) absences.get(i-numbOfPresence)); - out[i] = nn.getNegativeCase(); - } - - // train the NN - nn.train(in, out); - save(fileName, nn); - - } catch (Exception e) { - e.printStackTrace(); - AnalysisLogger.getLogger().error("ERROR during training"); - } - status = 100f; - } - - public VARTYPE getContentType() { - return VARTYPE.FILE; - } - - public Object getContent() { - return new File(fileName); - - } - - @Override - public void stop() { - - } - - - public static void save(String nomeFile, Neural_Network nn) { - - File f = new File(nomeFile); - FileOutputStream stream = null; - try { - stream = new FileOutputStream(f); - ObjectOutputStream oos = new ObjectOutputStream(stream); - oos.writeObject(nn); - } catch (Exception e) { - e.printStackTrace(); - AnalysisLogger.getLogger().error("ERROR in writing object on file: " + nomeFile); - } finally { - try { - stream.close(); - } catch (IOException e) { - } - } - AnalysisLogger.getLogger().trace("OK in writing object on file: " + nomeFile); - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNNNS.java b/src/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNNNS.java deleted file mode 100644 index 0d250e2..0000000 --- 
a/src/org/gcube/dataanalysis/ecoengine/models/ModelAquamapsNNNS.java +++ /dev/null @@ -1,225 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.ObjectOutputStream; -import java.util.HashMap; -import java.util.List; - -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.interfaces.Model; -import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.Neural_Network; -import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions.NeuralNet; -import org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions.Pattern; -import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; -import org.hibernate.SessionFactory; - -public class ModelAquamapsNNNS implements Model { - - @Override - public ALG_PROPS[] getProperties() { - ALG_PROPS[] props = { ALG_PROPS.SPECIES_MODEL }; - return props; - } - - @Override - public String getName() { - return "AQUAMAPSNNNS"; - } - - @Override - public String getDescription() { - return "Aquamaps Trained using Neural Networks"; - } - - @Override - public HashMap getInputParameters() { - HashMap parameters = new HashMap(); - parameters.put("AbsenceDataTable", new VarCouple(VARTYPE.STRING, "absence_data")); - parameters.put("PresenceDataTable", new VarCouple(VARTYPE.STRING, "presence_data")); - parameters.put("SpeciesName", new VarCouple(VARTYPE.STRING, "")); - parameters.put("UserName", new VarCouple(VARTYPE.STRING, "")); - - parameters.put("DatabaseUserName", new VarCouple(VARTYPE.DATABASEUSERNAME, "")); - parameters.put("DatabasePassword", new 
VarCouple(VARTYPE.DATABASEPASSWORD, "")); - parameters.put("DatabaseURL", new VarCouple(VARTYPE.DATABASEURL, "")); - parameters.put("DatabaseDriver", new VarCouple(VARTYPE.DATABASEDRIVER, "")); - return parameters; - } - - @Override - public float getVersion() { - return 0; - } - - @Override - public void setVersion(float version) { - } - - SessionFactory connection; - String fileName; - String presenceTable; - String absenceTable; - float status; - - @Override - public void init(AlgorithmConfiguration Input, Model previousModel) { - AnalysisLogger.setLogger(Input.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile); - - // init the database - String defaultDatabaseFile = Input.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile; - - Input.setDatabaseDriver(Input.getParam("DatabaseDriver")); - Input.setDatabaseUserName(Input.getParam("DatabaseUserName")); - Input.setDatabasePassword(Input.getParam("DatabasePassword")); - Input.setDatabaseURL(Input.getParam("DatabaseURL")); - - try { - connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, Input); - } catch (Exception e) { - e.printStackTrace(); - AnalysisLogger.getLogger().trace("ERROR initializing connection"); - } - - fileName = Input.getPersistencePath() + "neuralnetwork_" + Input.getParam("SpeciesName") + "_" + Input.getParam("UserName"); - presenceTable = Input.getParam("PresenceDataTable"); - absenceTable = Input.getParam("AbsenceDataTable"); - - } - - @Override - public String getResourceLoad() { - // TODO Auto-generated method stub - return null; - } - - @Override - public String getResources() { - // TODO Auto-generated method stub - return null; - } - - @Override - public float getStatus() { - return status; - } - - @Override - public String getInputType() { - return AlgorithmConfiguration.class.getName(); - } - - @Override - public String getOutputType() { - return String.class.getName(); - } - - @Override - public void postprocess(AlgorithmConfiguration Input, Model 
previousModel) { - connection.close(); - } - - private String takeElementsQuery = "select depthmean,depthmax,depthmin, sstanmean,sbtanmean,salinitymean,salinitybmean, primprodmean,iceconann,landdist,oceanarea from %1$s d where oceanarea>0 limit 449"; - - @Override - public void train(AlgorithmConfiguration Input, Model previousModel) { - - try { - // take all presence inputs - List presences = DatabaseFactory.executeSQLQuery(String.format(takeElementsQuery, presenceTable), connection); - // take all absence inputs - List absences = DatabaseFactory.executeSQLQuery(String.format(takeElementsQuery, absenceTable), connection); - int numbOfPresence = presences.size(); - int numbOfAbsence = absences.size(); - - // setup Neural Network - int numberOfInputNodes = 11; - int numberOfOutputNodes = 1; -// int[] innerLayers = Neural_Network.setupInnerLayers(100,30,10); -// int[] innerLayers = NeuralNet.setupInnerLayers(100,10,30); - int[] innerLayers = NeuralNet.setupInnerLayers(140); - NeuralNet nn = new NeuralNet(numberOfInputNodes, numberOfOutputNodes, innerLayers); - - - int numberOfInputs = numbOfPresence + numbOfAbsence; - double[][] in = new double[numberOfInputs][]; - double[][] out = new double[numberOfInputs][]; - // build NN input - for (int i = 0; i < numbOfPresence; i++) { - in[i] = NeuralNet.preprocessObjects((Object[]) presences.get(i)); - out[i] = nn.getPositiveCase(); - Pattern pattern = new Pattern(in[i], out[i]); - nn.IncrementalTrain(.2, pattern); - AnalysisLogger.getLogger().debug("-> "+i); - } - for (int i = numbOfPresence; i < numberOfInputs; i++) { - in[i] = NeuralNet.preprocessObjects((Object[]) absences.get(i-numbOfPresence)); - out[i] = nn.getNegativeCase(); - Pattern pattern = new Pattern(in[i], out[i]); - nn.IncrementalTrain(.2, pattern); - AnalysisLogger.getLogger().debug("-> "+i); - } - - - /* - int numberOfInputs = numbOfPresence; - double[][] in = new double[numberOfInputs][]; - double[][] out = new double[numberOfInputs][]; - // build NN input 
- for (int i = 0; i < numbOfPresence; i++) { - in[i] = Neural_Network.preprocessObjects((Object[]) presences.get(i)); - out[i] = nn.getPositiveCase(); - } - */ - - // train the NN - save(fileName, nn); - - } catch (Exception e) { - e.printStackTrace(); - AnalysisLogger.getLogger().error("ERROR during training"); - } - status = 100f; - } - - public VARTYPE getContentType() { - return VARTYPE.FILE; - } - - public Object getContent() { - return fileName; - - } - - @Override - public void stop() { - - } - - - public static void save(String nomeFile, NeuralNet nn) { - - File f = new File(nomeFile); - FileOutputStream stream = null; - try { - stream = new FileOutputStream(f); - ObjectOutputStream oos = new ObjectOutputStream(stream); - oos.writeObject(nn); - } catch (Exception e) { - e.printStackTrace(); - AnalysisLogger.getLogger().error("ERROR in writing object on file: " + nomeFile); - } finally { - try { - stream.close(); - } catch (IOException e) { - } - } - AnalysisLogger.getLogger().trace("OK in writing object on file: " + nomeFile); - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/ModelHSPEN.java b/src/org/gcube/dataanalysis/ecoengine/models/ModelHSPEN.java deleted file mode 100644 index 43fc226..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/ModelHSPEN.java +++ /dev/null @@ -1,443 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import org.gcube.contentmanagement.graphtools.utils.HttpRequest; -import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; -import org.gcube.dataanalysis.ecoengine.configuration.ALG_PROPS; -import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; -import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad; -import 
org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources; -import org.gcube.dataanalysis.ecoengine.interfaces.Model; -import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.AquamapsEnvelopeAlgorithm; -import org.gcube.dataanalysis.ecoengine.models.cores.aquamaps.EnvelopeSet; -import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory; -import org.gcube.dataanalysis.ecoengine.utils.VARTYPE; -import org.gcube.dataanalysis.ecoengine.utils.VarCouple; -import org.hibernate.SessionFactory; - -public class ModelHSPEN implements Model { - - private float version; - // DB SESSION - protected SessionFactory connection; - // Queries - private static final String alterQuery = "UPDATE %HSPEN% SET %1$s WHERE speciesid = '%2$s'"; - private static final String dropHspenTable = "DROP TABLE %HSPEN%; "; - private static final String createHspenTable = "CREATE TABLE %HSPEN% ( speccode integer, speciesid character varying NOT NULL, lifestage character varying NOT NULL, faoareas character varying(100), faoareasref character varying, faocomplete smallint, nmostlat real, smostlat real, wmostlong real,emostlong real, lme character varying(180), depthyn smallint, depthmin integer, depthmax integer, depthprefmin integer, depthprefmax integer, meandepth smallint, depthref character varying, pelagic smallint, tempyn smallint, tempmin real, tempmax real, tempprefmin real, tempprefmax real, tempref character varying, salinityyn smallint, salinitymin real, salinitymax real, salinityprefmin real, salinityprefmax real, salinityref character varying, primprodyn smallint, primprodmin real, primprodmax real, primprodprefmin real, primprodprefmax real, primprodprefref character varying, iceconyn smallint, iceconmin real, iceconmax real, iceconprefmin real, iceconprefmax real, iceconref character varying, landdistyn smallint, landdistmin real, landdistmax real, landdistprefmin real, landdistprefmax real, landdistref character varying, remark character varying, datecreated timestamp 
without time zone, datemodified timestamp without time zone, expert integer, dateexpert timestamp without time zone, envelope smallint, mapdata smallint, effort smallint, layer character(1), usepoints smallint, rank smallint, CONSTRAINT %HSPEN%_pkey PRIMARY KEY (speciesid, lifestage))WITH ( OIDS=FALSE); CREATE INDEX envelope_%HSPEN%_idx ON %HSPEN% USING btree (envelope); CREATE INDEX mapdata_%HSPEN%_idx ON %HSPEN% USING btree (mapdata); CREATE INDEX speciesid_%HSPEN%_idx ON %HSPEN% USING btree (speciesid);"; - private static final String populateNewHspen = "insert into %HSPEN% (select * from %HSPEN_ORIGIN%);"; - private static final String speciesListQuery = "select distinct speciesid from %HSPEN%;"; - private static final String hspenListQuery = "select speciesid, layer, iceconmin , iceconmax , iceconprefmin , iceconprefmax , salinitymin , salinitymax , salinityprefmin , salinityprefmax , landdistmin , landdistmax , landdistprefmin , landdistprefmax , tempmin , tempmax , tempprefmin , tempprefmax , primprodmin , primprodmax , primprodprefmin , primprodprefmax from %HSPEN%;"; - - // constants - String defaultDatabaseFile = "DestinationDBHibernate.cfg.xml"; - String defaultLogFile = "ALog.properties"; - private String dynamicAlterQuery; - private String dynamicDropTable; - private String dynamicCreateTable; - private String dynamicPopulateNewHspen; - private String dynamicSpeciesListQuery; - private String dynamicHspenInformationQuery; - private String currentHCAFTable; - private String currentOccurrenceTable; - private int numberOfthreads; - private ExecutorService executorService; - private boolean threadActivity[]; - private int countDifferences; - private boolean interruptProcessing; - private float status; - private int numbOfProcessedSpecies; - HashMap> allSpeciesHspen; - private int lastProcessedRecordsNumber; - private long lastTime; - AlgorithmConfiguration outconfig; - private String outputTable; - - @Override - public float getVersion() { - return 
version; - } - - @Override - public String getName() { - return "HSPEN"; - } - - - @Override - public void init(AlgorithmConfiguration setup, Model previousModel) { - outconfig = setup; - defaultDatabaseFile = setup.getConfigPath() + defaultDatabaseFile; - - AnalysisLogger.setLogger(setup.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile); - try { - String defaultDatabaseFile = setup.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile; - - setup.setDatabaseDriver(setup.getParam("DatabaseDriver")); - setup.setDatabaseUserName(setup.getParam("DatabaseUserName")); - setup.setDatabasePassword(setup.getParam("DatabasePassword")); - setup.setDatabaseURL(setup.getParam("DatabaseURL")); - - connection = DatabaseFactory.initDBConnection(defaultDatabaseFile, setup); - } catch (Exception e) { - AnalysisLogger.getLogger().debug(e); - e.printStackTrace(); - } - - outputTable = outconfig.getParam("OuputEnvelopeTable"); - // initialize queries - dynamicAlterQuery = alterQuery.replace("%HSPEN%", outconfig.getParam("OuputEnvelopeTable")); - dynamicDropTable = dropHspenTable.replace("%HSPEN%", outconfig.getParam("OuputEnvelopeTable")); - dynamicCreateTable = createHspenTable.replace("%HSPEN%", outconfig.getParam("OuputEnvelopeTable")); - dynamicPopulateNewHspen = populateNewHspen.replace("%HSPEN_ORIGIN%", outconfig.getParam("EnvelopeTable")).replace("%HSPEN%", outconfig.getParam("OuputEnvelopeTable")); - dynamicSpeciesListQuery = speciesListQuery.replace("%HSPEN%", outconfig.getParam("EnvelopeTable")); - dynamicHspenInformationQuery = hspenListQuery.replace("%HSPEN%", outconfig.getParam("EnvelopeTable")); - currentHCAFTable = outconfig.getParam("CsquarecodesTable"); - currentOccurrenceTable = outconfig.getParam("OccurrenceCellsTable"); - - // Threads - numberOfthreads = outconfig.getNumberOfResources(); - - // interrupt process - interruptProcessing = false; - status = 0; - } - - // populates the selectedSpecies variable by getting species from db - private List 
populateSpecies() { - AnalysisLogger.getLogger().trace("Distribution Generator ->getting all species list from DB"); - List allspecies = DatabaseFactory.executeSQLQuery(dynamicSpeciesListQuery, connection); - return allspecies; - } - - private HashMap> populateHspen() { - HashMap> allSpeciesHspen = new HashMap>(); - List SpeciesInfo = DatabaseFactory.executeSQLQuery(dynamicHspenInformationQuery, connection); - int lenSpecies = SpeciesInfo.size(); - - for (int i = 0; i < lenSpecies; i++) { - Object[] speciesArray = (Object[]) SpeciesInfo.get(i); - String speciesid = (String) speciesArray[0]; - List singleSpeciesInfo = new ArrayList(); - singleSpeciesInfo.add(speciesArray); - allSpeciesHspen.put((String) speciesid, singleSpeciesInfo); - } - - return allSpeciesHspen; - } - - // initializes threads activity status - public void initializeThreads(int numberOfThreadsToUse) { - // initialize threads and their activity state - executorService = Executors.newFixedThreadPool(numberOfThreadsToUse); - - threadActivity = new boolean[numberOfThreadsToUse]; - // initialize to false; - for (int j = 0; j < threadActivity.length; j++) { - threadActivity[j] = false; - } - - } - - // waits for thread to be free - private void wait4Thread(int index) { - - // wait until thread is free - while (threadActivity[index]) { - try { - Thread.sleep(10); - } catch (InterruptedException e) { - } - } - } - - // shutdown the connection - public void shutdownConnection() { - connection.close(); - } - - private void generateTable(Object Input) throws Exception { - AlgorithmConfiguration config = (AlgorithmConfiguration) Input; - // create and populate the novel table - if (config.getParam("CreateTable").equalsIgnoreCase("true")){ - AnalysisLogger.getLogger().trace("Distribution Generator->recreating new table " + dynamicCreateTable); - try{ - DatabaseFactory.executeSQLUpdate(String.format(dynamicDropTable, config.getDatabaseUserName()), connection); - }catch(Exception e){ - 
AnalysisLogger.getLogger().trace("Impossible to drop table - maybe not existing"); - } - try{ - DatabaseFactory.executeSQLUpdate(String.format(dynamicCreateTable, config.getDatabaseUserName()), connection); - }catch(Exception e){ - AnalysisLogger.getLogger().trace("Impossible to create table - maybe yet existing"); - } - - } - AnalysisLogger.getLogger().trace("Distribution Generator->populating new table " + dynamicPopulateNewHspen); - DatabaseFactory.executeSQLUpdate(dynamicPopulateNewHspen, connection); - } - - @Override - public void train(AlgorithmConfiguration Input, Model previousModel) { - long tstart = System.currentTimeMillis(); - // INITIALIZATION - try { - AnalysisLogger.getLogger().trace("ModelHSPENr->populating species"); - List allspecies = populateSpecies(); - allSpeciesHspen = populateHspen(); - - AnalysisLogger.getLogger().trace("ModelHSPENr->ENVELOPES GENERATION STARTED"); - - // initialize threads - initializeThreads(numberOfthreads); - // END INITIALIZATION - // generate the hspen table - generateTable(Input); - // thread selection index - int currentThread = 0; - // global chunks counter - int globalcounter = 0; - // count differences in hspen original and new hspen - countDifferences = 0; - // take time - long computationT0 = System.currentTimeMillis(); - int numberOfSpecies = allspecies.size(); - - // ENVELOPES CALCULATION - // cycle throw the species to generate - // one thread calculation for each species - for (Object species : allspecies) { - // get speciesID - String speciesid = (String) species; - if (speciesid.length() > 0) { - // calculation on multiple threads - AnalysisLogger.getLogger().trace("ModelHSPENr->ANALIZING SPECIES: " + speciesid); - // wait for thread to be free - wait4Thread(currentThread); - // start species information calculation on the thread - startNewTCalc(currentThread, speciesid); - // increment thread selection index - currentThread++; - // reset current thread index - if (currentThread >= numberOfthreads) - 
currentThread = 0; - // report probability - float s = (float) ((int) (((float) globalcounter * 100f / (numberOfSpecies)) * 100f)) / 100f; - status = (s == 100) ? 99 : s; - AnalysisLogger.getLogger().trace("STATUS->" + status + "%"); - - // increment global counter index - globalcounter++; - AnalysisLogger.getLogger().warn("Number of Found Differences: " + countDifferences); - } - - if (interruptProcessing) - break; - } - - // END OF CALCULATION CORE - - // wait for last threads to finish - for (int i = 0; i < numberOfthreads; i++) { - // free previous calculation - wait4Thread(i); - } - - long computationT1 = System.currentTimeMillis(); - AnalysisLogger.getLogger().warn("All Envelopes Computation Finished in " + (computationT1 - computationT0) + " ms"); - AnalysisLogger.getLogger().warn("Number of Overall Found Differences: " + countDifferences); - } catch (Exception e) { - AnalysisLogger.getLogger().trace("Computation traminate prematurely: ", e); - } finally { - // shutdown threads - executorService.shutdown(); - // shutdown connection - shutdownConnection(); - // set completeness - status = 100.0f; - long tstop = System.currentTimeMillis(); - AnalysisLogger.getLogger().warn("All Envelopes Computation Finished in " + (tstop - tstart) + " ms"); - } - } - - // THREAD SECTION - // definition of the Thread - // calculates values for one species - private class ThreadCalculator implements Callable { - int index; - String species; - - public ThreadCalculator(int index, String species) { - this.species = species; - this.index = index; - } - - public Integer call() { - - try { - calcEnvelopes(species); - } catch (Exception e) { - AnalysisLogger.getLogger().trace("" + e); - e.printStackTrace(); - } - threadActivity[index] = false; - return 0; - } - } - - // end Definition of the Thread - // activation - private void startNewTCalc(int index, String species) { - threadActivity[index] = true; - ThreadCalculator tc = new ThreadCalculator(index, species); - 
executorService.submit(tc); - } - - // END OF THREAD SECTION - - // calculation for standalone mode - public void calcEnvelopes(String species) { - // take initial time - long t0 = System.currentTimeMillis(); - try { - // take information for the selected Species - List singleHspen = allSpeciesHspen.get(species); - // call all envelopes calculations - EnvelopeSet envSet = AquamapsEnvelopeAlgorithm.calculateEnvelopes(species, connection, currentOccurrenceTable, currentHCAFTable, (Object[]) singleHspen.get(0)); - String instruction = envSet.getEnvelopeString(); - // take the result of the calculation - long t1 = System.currentTimeMillis(); - AnalysisLogger.getLogger().trace("Computation for species " + species + " finished in " + (t1 - t0) + " ms"); - - if (instruction.length() > 0) { - countDifferences++; - // write results on the DB - String query = String.format(dynamicAlterQuery, instruction, species); - try { - AnalysisLogger.getLogger().trace("Envelope Generated - executing query: " + query); - DatabaseFactory.executeSQLUpdate(query, connection); - } catch (Exception e) { - AnalysisLogger.getLogger().trace("could not execute update"); - e.printStackTrace(); - // System.exit(0); - } - } - - } catch (Exception ex) { - AnalysisLogger.getLogger().trace("Computation traminated prematurely: ", ex); - } - numbOfProcessedSpecies++; - // take ending time - } - - public VARTYPE getContentType() { - return VARTYPE.HSPEN; - } - - public Object getContent() { - return outputTable; - } - - @Override - public void setVersion(float version) { - this.version = version; - } - - @Override - public void postprocess(AlgorithmConfiguration Input, Model previousModel) { - - } - - @Override - public String getResourceLoad() { - String returnString = ""; - try { - long tk = System.currentTimeMillis(); - // double activity = Double.valueOf(processedRecordsCounter)*1000.00/Double.valueOf(tk-tstart); - double activity = Double.valueOf(numbOfProcessedSpecies - lastProcessedRecordsNumber) * 
1000.00 / Double.valueOf(tk - lastTime); - lastTime = tk; - lastProcessedRecordsNumber = numbOfProcessedSpecies; - - ResourceLoad rs = new ResourceLoad(tk, activity); - returnString = rs.toString(); - } catch (Exception e) { - e.printStackTrace(); - long tk = System.currentTimeMillis(); - returnString = new ResourceLoad(tk, 0).toString(); - } - - return returnString; - } - - @Override - //this methods gets information about the threads or the machines which are running the computation - public String getResources(){ - Resources res = new Resources(); - try{ - for (int i=0;i getInputParameters() { - HashMap parameters = new HashMap(); - parameters.put("EnvelopeTable", new VarCouple(VARTYPE.STRING,"hspen")); - parameters.put("CsquarecodesTable", new VarCouple(VARTYPE.STRING,"hcaf_d")); - parameters.put("OccurrenceCellsTable", new VarCouple(VARTYPE.STRING,"occurrencecells")); - parameters.put("CreateTable", new VarCouple(VARTYPE.STRING,"true")); - parameters.put("OuputEnvelopeTable", new VarCouple(VARTYPE.RANDOM,"hspen_")); - parameters.put("DatabaseUserName",new VarCouple(VARTYPE.DATABASEUSERNAME,"")); - parameters.put("DatabasePassword",new VarCouple(VARTYPE.DATABASEPASSWORD,"")); - parameters.put("DatabaseURL",new VarCouple(VARTYPE.DATABASEURL,"")); - parameters.put("DatabaseDriver",new VarCouple(VARTYPE.DATABASEDRIVER,"")); - return parameters; - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/AquamapsEnvelope.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/AquamapsEnvelope.java deleted file mode 100644 index 629fc8b..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/AquamapsEnvelope.java +++ /dev/null @@ -1,182 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - -import java.util.List; - -import org.gcube.contentmanagement.graphtools.utils.MathFunctions; - -public class AquamapsEnvelope { - - public Double Min; - public Double PMin; - public Double Max; - public Double 
PMax; - - public static final double $TempUpper = 30; - public static final double $TempLower = -2; - - public static final double $SalinUpper = 40.2; - public static final double $SalinLower = 3.56; - - public static final double $ProdUpper = 6000; - public static final double $ProdLower = 0; - - public static final double $LandUpper = 4200; - public static final double $LandLower = 0; - - public static final double $SalinBUpper = 40.9; - public static final double $SalinBLower = 3.5; - - private float toleranceThr = 0.015f; //tolerance on relative error - - public static int round(double $n){ - - $n = Math.round($n * 100.00)/100.00; - - String $nstr = ""+$n; - int $dec_pos = $nstr.indexOf('.'); - int $final = 0; - - String $left_char = ""; - String $right_char = ""; - - if ($dec_pos>0) - { - $left_char=$nstr.substring($dec_pos-1,$dec_pos); - $right_char=$nstr.substring($dec_pos+1,$dec_pos+2); - } - if ($right_char.equals("5")) - { - if ( - $left_char.equals("0") || - $left_char.equals("2") || - $left_char.equals("4") || - $left_char.equals("6") || - $left_char.equals("8") - ) - { - $final = (int)Math.round($n)-1; - } - else - { - $final = (int)Math.round($n); - } - - } - else - { - $final = (int)Math.round($n); - } - - - return $final; - } - - public void calculatePercentiles(List speciesOccurrences, Double $Uppermost, Double $Lowermost){ - int position = 2; - int $reccount = speciesOccurrences.size(); - //compute positions of percentiles: 25th, 75th, 10th and 90th - int $Rec25 = round(25f * ($reccount + 1f) / 100f) - 1; //25 - int $Rec75 = round(75f * ($reccount + 1f) / 100f) - 1; //75 - int $Rec10 = 0; - int $Rec90 = 0; - - if ($reccount >= 10 && $reccount <= 13) - { - $Rec10 = round(10f * ($reccount + 1f) / 100f); - $Rec90 = round(90f * ($reccount + 1f) / 100f) - 2; - } - else - { - $Rec10 = round(10f * ($reccount + 1f) / 100f) - 1; - $Rec90 = round(90f * ($reccount + 1f) / 100f) - 1; - } - - //get percentiles -// $paramData->data_seek(0); - Object[] $row2 = 
(Object[])speciesOccurrences.get(0); - double $Min = AquamapsEnvelopeAlgorithm.getNumber($row2,position); - -// $paramData->data_seek($reccount - 1); - $row2 = (Object[])speciesOccurrences.get($reccount - 1); - double $Max = AquamapsEnvelopeAlgorithm.getNumber($row2,position); - -// $paramData->data_seek($Rec25); - $row2 = (Object[])speciesOccurrences.get($Rec25); - double $25 = AquamapsEnvelopeAlgorithm.getNumber($row2,position); - -// $paramData->data_seek($Rec75); - $row2 = (Object[])speciesOccurrences.get($Rec75); - double $75 = AquamapsEnvelopeAlgorithm.getNumber($row2,position); - -// $paramData->data_seek($Rec10); - $row2 = (Object[])speciesOccurrences.get($Rec10); - double $PMin = AquamapsEnvelopeAlgorithm.getNumber($row2,position); - -// $paramData->data_seek($Rec90); - $row2 = (Object[])speciesOccurrences.get($Rec90); - double $PMax = AquamapsEnvelopeAlgorithm.getNumber($row2,position); - - - if (($Uppermost!= null) && ($Lowermost != null)){ - //interquartile adjusting - double $InterQuartile = Math.abs($25 - $75); - double $ParaAdjMax = $75 + Double.valueOf(1.5) * $InterQuartile; - double $ParaAdjMin = $25 - Double.valueOf(1.5) * $InterQuartile; - - if ($ParaAdjMax < $Uppermost && $ParaAdjMax > $Max) - { - $Max = $ParaAdjMax; - } - if ($ParaAdjMin > $Lowermost && $ParaAdjMin < $Min) - { - $Min = $ParaAdjMin; - } - } - - Min = $Min; - Max = $Max; - PMin = $PMin; - PMax = $PMax; - } - - private static double relativeError(double realvalue,double calculatedvalue){ - double absoluteError = Math.abs(realvalue-calculatedvalue); - double relativeErr = 0; - double denominator = 1; - if (realvalue!=0) - denominator = realvalue; - - if (!((realvalue ==0) && (absoluteError==0))) - relativeErr = absoluteError/denominator; - - - -// AnalysisLogger.getLogger().debug("relative error "+relativeErr+" "+realvalue+" vs "+calculatedvalue); - return Math.abs(relativeErr); - } - - public boolean checkPrevious(Double prevMin,Double prevMax,Double prevPMin,Double prevPMax){ - 
try{ - if ((relativeError(prevMin,Min) tempvalues = new ArrayList(); - List salinityvalues = new ArrayList(); - List primprodvalues = new ArrayList(); - List icevalues = new ArrayList(); - List landdistvalues = new ArrayList(); - - List list = occurrencePointsList.getOccurrenceMap().get(""+EnvelopeName.TEMPERATURE); - for (OccurrencePoint op:list){ - tempvalues.add(op.toObjectArray()); - } - list = occurrencePointsList.getOccurrenceMap().get(""+EnvelopeName.SALINITY); - for (OccurrencePoint op:list){ - salinityvalues.add(op.toObjectArray()); - } - list = occurrencePointsList.getOccurrenceMap().get(""+EnvelopeName.PRIMARY_PRODUCTION); - for (OccurrencePoint op:list){ - primprodvalues.add(op.toObjectArray()); - } - list = occurrencePointsList.getOccurrenceMap().get(""+EnvelopeName.ICE_CONCENTRATION); - for (OccurrencePoint op:list){ - icevalues.add(op.toObjectArray()); - } - list = occurrencePointsList.getOccurrenceMap().get(""+EnvelopeName.LAND_DISTANCE); - for (OccurrencePoint op:list){ - landdistvalues.add(op.toObjectArray()); - } - //build up envelope set - EnvelopeSet envSet = calcEnv(species,singleSpeciesValues,tempvalues,salinityvalues,primprodvalues,icevalues,landdistvalues); - - return envSet; - } - - //the core of the procedure - public static EnvelopeSet calcEnv(String species, Object[] singleSpeciesValues, List tempvalues,List salinityvalues,List primprodvalues,List icevalues,List landdistvalues){ - if (tempvalues.size()<10){ - AnalysisLogger.getLogger().warn("WARNING: NOT ENOUGH OCCURRENCES FOR SPECIES: "+species); - AnalysisLogger.getLogger().warn("Leaving the hspen as is"); - return new EnvelopeSet(); - } - //take previousValues - Double prevIceMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,2); - Double prevIceMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,3); - Double prevIcePMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,4); - Double prevIcePMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,5); 
- Double prevSalinityMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,6); - Double prevSalinityMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,7); - Double prevSalinityPMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,8); - Double prevSalinityPMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,9); - Double prevLanddistMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,10); - Double prevLanddistMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,11); - Double prevLanddistPMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,12); - Double prevLanddistPMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,13); - Double prevTempMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,14); - Double prevTempMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,15); - Double prevTempPMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,16); - Double prevTempPMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,17); - Double prevPrimProdMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,18); - Double prevPrimProdMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,19); - Double prevPrimProdPMin = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,20); - Double prevPrimProdPMax = AquamapsEnvelopeAlgorithm.getNumber(singleSpeciesValues,21); - //previous values taken - String layer = getElement(singleSpeciesValues,1); - - SpEnv_temp tempEnv = new SpEnv_temp(); - tempEnv.calcEnvelope(layer, tempvalues); - - SpEnv_salinity salinityEnv = new SpEnv_salinity(); - salinityEnv.calcEnvelope(salinityvalues,layer); - - SpEnv_primprod primprodEnv = new SpEnv_primprod(); - primprodEnv.calcEnvelope(primprodvalues); - - SpEnv_seaice seaiceEnv = new SpEnv_seaice(); - seaiceEnv.calcEnvelope(icevalues); - - SpEnv_landdist landdistEnv = new SpEnv_landdist(); - landdistEnv.calcEnvelope(landdistvalues); - - - String addingElements = ""; - int 
countchunks = 0; - if (!tempEnv.checkPrevious(prevTempMin,prevTempMax,prevTempPMin,prevTempPMax)) - { - AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevTempMin+","+prevTempPMin+","+prevTempPMax+","+prevTempMax+" vs "+tempEnv.toString()); - addingElements+=tempEnv.toString(); - countchunks++; - } - if (!salinityEnv.checkPrevious(prevSalinityMin,prevSalinityMax,prevSalinityPMin,prevSalinityPMax)) - { - AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevSalinityMin+","+prevSalinityPMin+","+prevSalinityPMax+","+prevSalinityMax+" vs "+salinityEnv.toString()); - if (countchunks>0) - addingElements+=","; - addingElements+=salinityEnv.toString(); - countchunks++; - } - if (!primprodEnv.checkPrevious(prevPrimProdMin,prevPrimProdMax,prevPrimProdPMin,prevPrimProdPMax)) - { - AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevPrimProdMin+","+prevPrimProdPMin+","+prevPrimProdPMax+","+prevPrimProdMax+" vs "+primprodEnv.toString()); - if (countchunks>0) - addingElements+=","; - addingElements+=primprodEnv.toString(); - countchunks++; - } - if (!seaiceEnv.checkPrevious(prevIceMin,prevIceMax,prevIcePMin,prevIcePMax)) - { - AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevIceMin+","+prevIcePMin+","+prevIcePMax+","+prevIceMax+" vs "+seaiceEnv.toString()); - if (countchunks>0) - addingElements+=","; - addingElements+=seaiceEnv.toString(); - countchunks++; - } - if (!landdistEnv.checkPrevious(prevLanddistMin,prevLanddistMax,prevLanddistPMin,prevLanddistPMax)) - { - AnalysisLogger.getLogger().warn("DIFFERENCE ON SPECIES: "+species+" - "+prevLanddistMin+","+prevLanddistPMin+","+prevLanddistPMax+","+prevLanddistPMax+" vs "+landdistEnv.toString()); - if (countchunks>0) - addingElements+=","; - addingElements+=landdistEnv.toString(); - countchunks++; - } - - //build up envelope set - EnvelopeSet envSet = new EnvelopeSet(); - 
envSet.addEnvelope(tempEnv.toEnvelope(EnvelopeName.TEMPERATURE)); - envSet.addEnvelope(salinityEnv.toEnvelope(EnvelopeName.SALINITY)); - envSet.addEnvelope(primprodEnv.toEnvelope(EnvelopeName.PRIMARY_PRODUCTION)); - envSet.addEnvelope(seaiceEnv.toEnvelope(EnvelopeName.ICE_CONCENTRATION)); - envSet.addEnvelope(landdistEnv.toEnvelope(EnvelopeName.LAND_DISTANCE)); - - envSet.setEnvelopeString(addingElements); - - return envSet; - - } - - - public static EnvelopeSet calculateEnvelopes(String species, SessionFactory vreConnection, String occurrencePointsTable, String HcafTable, Object[] singleSpeciesValues){ - - String dynamicSelectValues = selectValues.replace("%OCCURRENCEPOINTS%", occurrencePointsTable).replace("%HCAF%", HcafTable); - String layer = getElement(singleSpeciesValues,1); - - String TemperatureField = "SSTAnMean"; - String SalinityField = "SalinityMean"; - String PrimProdField = "PrimProdMean"; - String IceField = "IceConAnn"; - String LanddistField = "LandDist"; - - if ((layer != null)&&(layer.equals("b"))){ - TemperatureField = "SBTAnMean"; - SalinityField = "SalinityBMean"; - } - - String TemperatureQuery = String.format(dynamicSelectValues,TemperatureField,species); - String SalinityQuery = String.format(dynamicSelectValues,SalinityField,species); - String PrimProdQuery = String.format(dynamicSelectValues,PrimProdField,species); - String IceQuery = String.format(dynamicSelectValues,IceField,species); - String LanddistQuery = String.format(dynamicSelectValues,LanddistField,species); - - System.out.println(TemperatureQuery); - List tempvalues = DatabaseFactory.executeSQLQuery(TemperatureQuery, vreConnection); - List salinityvalues = DatabaseFactory.executeSQLQuery(SalinityQuery, vreConnection); - List primprodvalues = DatabaseFactory.executeSQLQuery(PrimProdQuery, vreConnection); - List icevalues = DatabaseFactory.executeSQLQuery(IceQuery, vreConnection); - List landdistvalues = DatabaseFactory.executeSQLQuery(LanddistQuery, vreConnection); - - //build 
up envelope set - EnvelopeSet envSet = calcEnv(species,singleSpeciesValues,tempvalues,salinityvalues,primprodvalues,icevalues,landdistvalues); - - return envSet; - - } - - public Object[] hspen2ObjectArray(Hspen hspen) { - - //convert hspen to object array - Object [] singleHspen = new Object[22]; - singleHspen[0] = hspen.getSpeciesID();singleHspen[1] = hspen.getLayer(); - - singleHspen[2] = hspen.getIceConcentration().getMin();singleHspen[3] = hspen.getIceConcentration().getMax(); - singleHspen[4] = hspen.getIceConcentration().getPrefmin();singleHspen[5] = hspen.getIceConcentration().getPrefmax(); - - singleHspen[6] = hspen.getSalinity().getMin();singleHspen[7] = hspen.getSalinity().getMax(); - singleHspen[8] = hspen.getSalinity().getPrefmin();singleHspen[9] = hspen.getSalinity().getPrefmax(); - - singleHspen[10] = hspen.getLandDistance().getMin();singleHspen[11] = hspen.getLandDistance().getMax(); - singleHspen[12] = hspen.getLandDistance().getPrefmin();singleHspen[13] = hspen.getLandDistance().getPrefmax(); - - singleHspen[14] = hspen.getTemperature().getMin();singleHspen[15] = hspen.getTemperature().getMax(); - singleHspen[16] = hspen.getTemperature().getPrefmin();singleHspen[17] = hspen.getTemperature().getPrefmax(); - - singleHspen[18] = hspen.getPrimaryProduction().getMin();singleHspen[19] = hspen.getPrimaryProduction().getMax(); - singleHspen[20] = hspen.getPrimaryProduction().getPrefmin();singleHspen[21] = hspen.getPrimaryProduction().getPrefmax(); - - return singleHspen; - } - - - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/Coordinates.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/Coordinates.java deleted file mode 100644 index b33f564..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/Coordinates.java +++ /dev/null @@ -1,64 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - -public class Coordinates { - - private String name; - private String NMostLat; - 
private String SMostLat; - private String WMostLong; - private String EMostLong; - private String maxCenterLat; - private String minCenterLat; - - public Coordinates(String nmostLat,String smostLat,String wmostLong,String emostLong,String maxCenterLat,String minCenterLat){ - NMostLat = nmostLat; - SMostLat = smostLat; - WMostLong = wmostLong; - EMostLong = emostLong; - maxCenterLat = maxCenterLat; - minCenterLat = minCenterLat; - } - public void setNMostLat(String nMostLat) { - NMostLat = nMostLat; - } - public String getNMostLat() { - return NMostLat; - } - public void setSMostLat(String sMostLat) { - SMostLat = sMostLat; - } - public String getSMostLat() { - return SMostLat; - } - public void setWMostLong(String wMostLong) { - WMostLong = wMostLong; - } - public String getWMostLong() { - return WMostLong; - } - public void setEMostLong(String eMostLong) { - EMostLong = eMostLong; - } - public String getEMostLong() { - return EMostLong; - } - public void setMaxCenterLat(String maxCenterLat) { - maxCenterLat = maxCenterLat; - } - public String getMaxCenterLat() { - return maxCenterLat; - } - public void setMinCenterLat(String minCenterLat) { - minCenterLat = minCenterLat; - } - public String getMinCenterLat() { - return minCenterLat; - } - public void setName(String name) { - this.name = name; - } - public String getName() { - return name; - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/Envelope.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/Envelope.java deleted file mode 100644 index a26b0b2..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/Envelope.java +++ /dev/null @@ -1,56 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - - -public class Envelope { - - private EnvelopeName name; - private String min; - private String prefmin; - private String prefmax; - private String max; - - public Envelope(){ - - } - public Envelope(String min,String prefMin,String 
prefMax,String max){ - this.min = min; - this.prefmin = prefMin; - this.prefmax = prefMax; - this.max = max; - } - - public void setMin(String min) { - this.min = min; - } - public String getMin() { - return min; - } - public void setPrefmin(String prefmin) { - this.prefmin = prefmin; - } - public String getPrefmin() { - return prefmin; - } - public void setPrefmax(String prefmax) { - this.prefmax = prefmax; - } - public String getPrefmax() { - return prefmax; - } - public void setMax(String max) { - this.max = max; - } - public String getMax() { - return max; - } - public void setName(EnvelopeName name) { - this.name = name; - } - public EnvelopeName getName() { - return name; - } - - - - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/EnvelopeModel.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/EnvelopeModel.java deleted file mode 100644 index 13e3e5a..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/EnvelopeModel.java +++ /dev/null @@ -1,7 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - -public enum EnvelopeModel { - - AQUAMAPS, - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/EnvelopeName.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/EnvelopeName.java deleted file mode 100644 index fbb403d..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/EnvelopeName.java +++ /dev/null @@ -1,10 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - -public enum EnvelopeName { - - TEMPERATURE, - SALINITY, - PRIMARY_PRODUCTION, - ICE_CONCENTRATION, - LAND_DISTANCE -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/EnvelopeSet.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/EnvelopeSet.java deleted file mode 100644 index 3e94a70..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/EnvelopeSet.java +++ /dev/null @@ -1,37 +0,0 @@ 
-package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - -import java.util.ArrayList; -import java.util.List; - -public class EnvelopeSet { - - private List envelopes; - private String envelopeString; - - public EnvelopeSet(){ - envelopes = new ArrayList(); - envelopeString = ""; - } - - public void setEnvelopeString(String envelopeString) { - this.envelopeString = envelopeString; - } - - public String getEnvelopeString() { - return envelopeString; - } - - public void setEnvelopes(List envelopes) { - this.envelopes = envelopes; - } - - public List getEnvelopes() { - return envelopes; - } - - public void addEnvelope(Envelope e){ - envelopes.add(e); - } - - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/Hspen.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/Hspen.java deleted file mode 100644 index 889be78..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/Hspen.java +++ /dev/null @@ -1,127 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - - -public class Hspen { - - private String speciesID; - private Envelope depth; - private String meanDepth; - private boolean pelagic; - private String layer; - private Envelope temperature; - private Envelope salinity; - private Envelope primaryProduction; - private Envelope iceConcentration; - private boolean landDistanceYN; - private Envelope landDistance; - private Coordinates coordinates; - private String faoAreas; - - public void setDepth(Envelope depth) { - this.depth = depth; - } - public Envelope getDepth() { - return depth; - } - public void setMeanDepth(String meanDepth) { - this.meanDepth = meanDepth; - } - public String getMeanDepth() { - return meanDepth; - } - public void setPelagic(boolean pelagic) { - this.pelagic = pelagic; - } - public boolean isPelagic() { - return pelagic; - } - public void setLayer(String layer) { - this.layer = layer; - } - public String getLayer() { - return layer; - } - public void 
setTemperature(Envelope temperature) { - this.temperature = temperature; - } - public Envelope getTemperature() { - return temperature; - } - public void setSalinity(Envelope salinity) { - this.salinity = salinity; - } - public Envelope getSalinity() { - return salinity; - } - public void setPrimaryProduction(Envelope primaryProduction) { - this.primaryProduction = primaryProduction; - } - public Envelope getPrimaryProduction() { - return primaryProduction; - } - public void setIceConcentration(Envelope iceConcentration) { - this.iceConcentration = iceConcentration; - } - public Envelope getIceConcentration() { - return iceConcentration; - } - public void setLandDistanceYN(boolean landDistanceYN) { - this.landDistanceYN = landDistanceYN; - } - public boolean isLandDistanceYN() { - return landDistanceYN; - } - public void setLandDistance(Envelope landDistance) { - this.landDistance = landDistance; - } - public Envelope getLandDistance() { - return landDistance; - } - public void setCoordinates(Coordinates coordinates) { - this.coordinates = coordinates; - } - public Coordinates getCoordinates() { - return coordinates; - } - public void setFaoAreas(String faoAreas) { - this.faoAreas = faoAreas; - } - public String getFaoAreas() { - return faoAreas; - } - - - - - public Object[] toObjectArray(){ - Object[] array = new Object[33]; - array[0] = depth.getMin();array[1] = meanDepth; array[2] = depth.getPrefmin(); - array[3] = (pelagic)?1:0; - array[4] = depth.getPrefmax(); array[5] = depth.getMax(); - array[6] = temperature.getMin(); - array[7] = layer; - array[8] = temperature.getPrefmin();array[9] = temperature.getPrefmax();array[10] = temperature.getMax(); - array[11] = salinity.getMin();array[12] = salinity.getPrefmin();array[13] = salinity.getPrefmax();array[14] = salinity.getMax(); - array[15] = primaryProduction.getMin();array[16] = primaryProduction.getPrefmin();array[17] = primaryProduction.getPrefmax();array[18] = primaryProduction.getMax(); - array[19] = 
iceConcentration.getMin();array[20] = iceConcentration.getPrefmin();array[21] = iceConcentration.getPrefmax();array[22] = iceConcentration.getMax(); - array[23] = (landDistanceYN)?1:0; - array[24] = landDistance.getMin();array[25] = landDistance.getPrefmin();array[26] = landDistance.getPrefmax();array[27] = landDistance.getMax(); - array[28] = coordinates.getNMostLat();array[29] = coordinates.getSMostLat();array[30] = coordinates.getWMostLong();array[31] = coordinates.getEMostLong(); - array[32] = faoAreas; - return array; - } - - public Object[] latitudeExtent(){ - Object[] array = new Object[2]; - array[0] = coordinates.getMaxCenterLat(); - array[1] = coordinates.getMinCenterLat(); - return array; - } - public void setSpeciesID(String speciesID) { - this.speciesID = speciesID; - } - public String getSpeciesID() { - return speciesID; - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/OccurrencePoint.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/OccurrencePoint.java deleted file mode 100644 index 3a96f84..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/OccurrencePoint.java +++ /dev/null @@ -1,54 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - -public class OccurrencePoint { - private String speciesID; - private String cSquareCode; - private Double value; - - public OccurrencePoint(String speciesID,String cSquareCode,Double value){ - this.speciesID=speciesID; - this.cSquareCode= cSquareCode; - this.value = value; - } - - public OccurrencePoint(String cSquareCode, Double value){ - this.cSquareCode= cSquareCode; - this.value = value; - } - - public OccurrencePoint(Double value){ - this.speciesID=""; - this.cSquareCode= ""; - this.value = value; - } - - public void setSpeciesID(String speciesID) { - this.speciesID = speciesID; - } - public String getSpeciesID() { - return speciesID; - } - public void setCsquareCode(String csquareCode) { - this.cSquareCode = 
csquareCode; - } - public String getCsquareCode() { - return cSquareCode; - } - public void setValue(Double value) { - this.value = value; - } - public Double getValue() { - return value; - } - - - - public Object[] toObjectArray(){ - Object[] array = new Object[3]; - array [0] = cSquareCode; - array [1] = speciesID; - array [2] = value; - - return array; - } -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/OccurrencePointSets.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/OccurrencePointSets.java deleted file mode 100644 index 977507c..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/OccurrencePointSets.java +++ /dev/null @@ -1,41 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class OccurrencePointSets { - - private Map> occurrenceMap; - - public OccurrencePointSets(){ - occurrenceMap = new HashMap>(); - } - - public void setOccurrenceMap(Map> occurrenceMap) { - this.occurrenceMap = occurrenceMap; - } - - public Map> getOccurrenceMap() { - return occurrenceMap; - } - - public void addOccurrencePointList(String name,List pointsList){ - occurrenceMap.put(name, pointsList); - } - - public void addOccurrencePointList(EnvelopeModel name,List pointsList){ - occurrenceMap.put(""+name, pointsList); - } - - public void addOccurrencePoint(String name,OccurrencePoint occurrencePoint){ - List occurrenceList = occurrenceMap.get(name); - occurrenceList.add(occurrencePoint); - } - - public void addOccurrencePoint(EnvelopeModel name,OccurrencePoint occurrencePoint){ - List occurrenceList = occurrenceMap.get(""+name); - occurrenceList.add(occurrencePoint); - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_landdist.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_landdist.java deleted file mode 100644 index 33c78de..0000000 --- 
a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_landdist.java +++ /dev/null @@ -1,64 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - -import java.util.List; - -public class SpEnv_landdist extends AquamapsEnvelope { - - /* -$strSQL="SELECT DISTINCT $oc_var.CsquareCode, $oc_var.SpeciesID, HCAF.LandDist -FROM $oc_var INNER JOIN HCAF ON $oc_var.CsquareCode = HCAF.CsquareCode -WHERE $oc_var.SpeciesID = '" . $row['SpeciesID'] . "' -AND HCAF.LandDist <> -9999 -AND HCAF.LandDist is not null -AND HCAF.OceanArea > 0 -AND $oc_var.inc = 'y' -ORDER BY HCAF.LandDist"; - */ - - public void calcEnvelope(List speciesOccurrences){ - calculatePercentiles(speciesOccurrences, $LandUpper, $LandLower); - - //check if envelope is as broad as pre-defined minimum - if (PMax - PMin < 2) - { - double $ParaMid = (PMin + PMax) / Double.valueOf(2); - double $PMinTmp = $ParaMid - 1; - double $PMaxTmp = $ParaMid + 1; - - //enforce a minimum preferred range as long as it doesn't extrapolate outer limits - if ($PMinTmp < Min) {//preferred Min value as is - } - else {PMin = $PMinTmp;} - - if ($PMaxTmp > Max) { //preferred Max value as is - } - else {PMax = $PMaxTmp;} - } - -// /check difference between min/max and pref. 
min/max - if (PMin - Min < 1) - { - double $MinTmp = PMin - 1; - if ($MinTmp > $LandLower) {Min = $MinTmp;} - else {Min = $LandLower;} - } - - if (Max - PMax < 1) - { - double $MaxTmp = PMax + 1; - if ($MaxTmp < $LandUpper) {Max = $MaxTmp;} - else {Max = $LandUpper;} - } - } - - - public String toString(){ - String exitString = "landdistmin='"+Min+"'," + - "landdistprefmin='"+PMin+"'," + - "landdistprefmax='"+PMax+"'," + - "landdistmax='"+Max+"'"; - - return exitString; - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_primprod.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_primprod.java deleted file mode 100644 index 3bd3763..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_primprod.java +++ /dev/null @@ -1,65 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - -import java.util.List; - -public class SpEnv_primprod extends AquamapsEnvelope{ - - /* -$strSQL="SELECT DISTINCT $oc_var.CsquareCode, $oc_var.SpeciesID, HCAF.PrimProdMean -FROM $oc_var INNER JOIN HCAF ON $oc_var.CsquareCode = HCAF.CsquareCode -WHERE $oc_var.SpeciesID = '" . $row['SpeciesID'] . "' -AND HCAF.PrimProdMean is not null -AND HCAF.OceanArea > 0 -AND $oc_var.inc = 'y' -ORDER BY HCAF.PrimProdMean"; - */ - - public void calcEnvelope(List speciesOccurrences){ - - calculatePercentiles(speciesOccurrences, $ProdUpper, $ProdLower); - - //check if envelope is as broad as pre-defined minimum - if (PMax - PMin < 2) - { - double $ParaMid = (PMin + PMax) / Double.valueOf(2); - double $PMinTmp = $ParaMid - 1; - double $PMaxTmp = $ParaMid + 1; - - //enforce a minimum preferred range as long as it doesn't extrapolate outer limits - if ($PMinTmp < Min) {//preferred Min value as is - } - else {PMin = $PMinTmp;} - - if ($PMaxTmp > Max) {//preferred Max value as is - } - else {PMax = $PMaxTmp;} - } - - //check difference between min/max and pref. 
min/max - if (PMin - Min < 1) - { - double $MinTmp = PMin - 1; - if ($MinTmp > $ProdLower) {Min = $MinTmp;} - else {Min = $ProdLower;} - } - - if (Max - PMax < 1) - { - double $MaxTmp = PMax + 1; - if ($MaxTmp < $ProdUpper) {Max = $MaxTmp;} - else {Max = $ProdUpper;} - } - - } - - public String toString(){ - String exitString = "primprodmin='"+Min+"'," + - "primprodprefmin='"+PMin+"'," + - "primprodprefmax='"+PMax+"'," + - "primprodmax='"+Max+"'"; - - return exitString; - } - - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_salinity.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_salinity.java deleted file mode 100644 index 9d6248b..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_salinity.java +++ /dev/null @@ -1,85 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - -import java.util.List; - -public class SpEnv_salinity extends AquamapsEnvelope{ - - /* - $strSQL="SELECT DISTINCT $oc_var.CsquareCode, $oc_var.SpeciesID, HCAF.$fld -FROM $oc_var INNER JOIN HCAF ON $oc_var.CsquareCode = HCAF.CsquareCode -WHERE $oc_var.SpeciesID = '" . $row['SpeciesID'] . 
"' -AND HCAF.$fld <> -9999 -AND HCAF.$fld is not null -AND HCAF.OceanArea > 0 -AND $oc_var.inc = 'y' -ORDER BY HCAF.$fld"; - */ - - public void calcEnvelope(List speciesOccurrences, String $layer){ - - double $SalinUp; - double $SalinLow; - if ($layer.equals("s")) - { - $SalinUp = $SalinUpper; - $SalinLow = $SalinLower; - } - else if ($layer.equals("b")) - { - $SalinUp = $SalinBUpper; //reset absolute min and max for bottom - $SalinLow = $SalinBLower; - } - else - { - $SalinUp = $SalinUpper; - $SalinLow = $SalinLower; - } - - calculatePercentiles(speciesOccurrences, $SalinUp, $SalinLow); - - //check if envelope is as broad as pre-defined minimum - if (PMax - PMin < 1) - { - double $ParaMid = (PMin + PMax) / Double.valueOf(2); - double $PMinTmp = $ParaMid - 0.5; - double $PMaxTmp = $ParaMid + 0.5; - - //enforce a minimum preferred range as long as it doesn't extrapolate outer limits - if ($PMinTmp < Min) { - // preferred Min value as is - } - else {PMin = $PMinTmp;} - - if ($PMaxTmp > Max) {//preferred Max value as is - } - else {PMax = $PMaxTmp;} - } - - //check difference between min/max and pref. 
min/max - if (PMin - Min < 0.5) - { - double $MinTmp = PMin - 0.5; - if ($MinTmp > $SalinLower) {Min = $MinTmp;} - else {Min = $SalinLower;} - } - - if (Max - PMax < 0.5) - { - double $MaxTmp = PMax + 0.5; - if ($MaxTmp < $SalinUpper) {Max = $MaxTmp;} - else {Max = $SalinUpper;} - } - - } - - - public String toString(){ - String exitString = "salinitymin='"+Min+"'," + - "salinityprefmin='"+PMin+"'," + - "salinityprefmax='"+PMax+"'," + - "salinitymax='"+Max+"'"; - - return exitString; - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_seaice.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_seaice.java deleted file mode 100644 index 6c8f002..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_seaice.java +++ /dev/null @@ -1,63 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - -import java.util.List; - -public class SpEnv_seaice extends AquamapsEnvelope{ - - /* -$strSQL="SELECT DISTINCT $oc_var.CsquareCode, $oc_var.SpeciesID, HCAF.IceConAnn -FROM $oc_var INNER JOIN HCAF ON $oc_var.CsquareCode = HCAF.CsquareCode -WHERE $oc_var.SpeciesID = '" . $row['SpeciesID'] . 
"' -AND HCAF.IceConAnn is not null -AND HCAF.OceanArea > 0 -AND $oc_var.inc = 'y' -ORDER BY HCAF.IceConAnn"; -*/ - //################################################################################### - //This file re-computes the temperature values (Min, PrefMin, Max, PrefMax based on - //area restriction parameters set by the user - //################################################################################### - public void calcEnvelope(List speciesOccurrences){ - - calculatePercentiles(speciesOccurrences, null, null); - - //per KK and JR: extend IceMin - avoid exclusion of species from all non-ice covered areas - double $adjVal = -1; double $sumIce = 0; double $meanIce = 0; - //fix to -1 per KK (Me!AdjustIce value taken from form input) - - //Mods May 2010: treat values <.01 as zero; per KK; revised during comparison with D4S2 Proj - if (Min < 0.01) - Min = 0.00; - - if (Min == 0) - { -// $paramData = $conn->query($strSQL); - $sumIce = 0; - - int $reccount = speciesOccurrences.size(); - for (int i=0 ; i< $reccount ;i++){ - Object[] $row = (Object[])speciesOccurrences.get(i); - double $IceConn = AquamapsEnvelopeAlgorithm.getNumber($row,2); - //ice concentration - $sumIce = $sumIce + $IceConn; - } - - if($reccount != 0) {$meanIce = Double.valueOf($sumIce) / Double.valueOf($reccount);} - else {$meanIce = 0;} - - Min = $adjVal + $meanIce; - } - - } - - - public String toString(){ - String exitString = "iceconmin='"+Min+"'," + - "iceconprefmin='"+PMin+"'," + - "iceconprefmax='"+PMax+"'," + - "iceconmax='"+Max+"'"; - - return exitString; - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_temp.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_temp.java deleted file mode 100644 index 19b50af..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/aquamaps/SpEnv_temp.java +++ /dev/null @@ -1,89 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.aquamaps; - -import java.util.List; - 
-public class SpEnv_temp extends AquamapsEnvelope { - - /* - $strSQL="SELECT DISTINCT speciesoccursum.CsquareCode, speciesoccursum.SpeciesID, HCAF.$fld - FROM speciesoccursum INNER JOIN HCAF ON speciesoccursum.CsquareCode = HCAF.CsquareCode - WHERE speciesoccursum.SpeciesID = ' .. ' - AND HCAF.$fld <> -9999 - AND HCAF.$fld is not null - AND HCAF.OceanArea > 0 - AND speciesoccursum.inc = 'y' - ORDER BY HCAF.$fld"; -*/ - //################################################################################### - //This file re-computes the temperature values (Min, PrefMin, Max, PrefMax based on - //area restriction parameters set by the user - //################################################################################### - public void calcEnvelope(String $layer, List speciesOccurrences){ - - calculatePercentiles(speciesOccurrences, $TempUpper, $TempLower); - - double $spreadVal = 0; - if (Max <= 5) //then polar and deepwater species - { $spreadVal = 0.25; } - else { $spreadVal = 1; } - - - if ((PMax - PMin) < $spreadVal) - { - double $ParaMid = (PMin + PMax) / 2f; - double $PMinTmp = $ParaMid - ($spreadVal / 2f); - double $PMaxTmp = $ParaMid + ($spreadVal / 2f); - - //enforce a minimum preferred range as long as it doesn't extrapolate outer limits - if ($PMinTmp < Min) - { - //preferred Min value as is - } - else - { - PMin = $PMinTmp; - } - - if ($PMaxTmp > Max) - { - //preferred Max value as is - } - else - { - PMax = $PMaxTmp; - } - } - - //check difference between min/max and pref. 
min/max - if (PMin - Min < 0.5) - { - double $MinTmp = PMin - 0.5; - if ($MinTmp > $TempLower){Min = $MinTmp;} - else {Min = $TempLower;} - } - - if (Max - PMax < 0.5) - { - double $MaxTmp = PMax + 0.5; - if ($MaxTmp < $TempUpper){Max = $MaxTmp;} - else {Max = $TempUpper;} - } - //check if envelope is as broad as pre-defined minimum - if (PMax >= 25) - { - Max = PMax + 4.2; - } - - } - - - public String toString(){ - String exitString = "tempmin='"+Min+"'," + - "tempprefmin='"+PMin+"'," + - "tempprefmax='"+PMax+"'," + - "tempmax='"+Max+"'"; - - return exitString; - } - -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/neuralnetworks/Neural_Network.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/neuralnetworks/Neural_Network.java deleted file mode 100644 index 6facd16..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/neuralnetworks/Neural_Network.java +++ /dev/null @@ -1,570 +0,0 @@ -//Classe neural_network:le sue istanze sono reti neurali feed forward multistato -//implementate come una "griglia" di istanze della classe Neuron.E' possibile scegliere -//il numero di layer e quello dei neuroni per ognuno di questi. - -package org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks; - -import java.io.Serializable; - -public class Neural_Network implements Serializable { - // VARIABILE DI CLASSE: - - // La variabile griglia è un array bidimensionale:ogni riga è un layer di - // neuroni - // ad ognuno dei quali è associato un vettore di pesi di lughezza uguale al - // numero - // di neuroni nella riga successiva. - - // N.B.:nel layer di input e in quelli intermedi viene creato un neurone "in - // più" - // che funge da "bias" per quelli successivi (ha uscita sempre uguale ad uno - // ma - // pesi diversi per ogni neurone a cui si collega). 
- - Neuron[][] griglia; - static final long serialVersionUID = 1; - // originale = 1.2 - double soglia = 0.001; - double maxcycle = 1000; - double acceptanceThr = 0.5; - - public void setThreshold(double soglia) { - this.soglia = soglia; - } - - public void setAcceptanceThreshold(double treshold) { - this.acceptanceThr = treshold; - } - - public void setCycles(double cycs) { - this.maxcycle = cycs; - } - - public static enum ACTIVATIONFUNCTION { - HEAVYSIDE, SIGMOID, IDENTITY - } - - public Neural_Network(int N, int M, ACTIVATIONFUNCTION attifun) { - this(N, M, attifun.ordinal() + 1); - } - - public Neural_Network(int N, int M, ACTIVATIONFUNCTION attifun, float[] V) { - this(N, M, attifun.ordinal() + 1, V); - } - - public static double[] preprocessObjects(Object[] vector) { - - double[] out = new double[vector.length]; - - for (int i = 0; i < vector.length; i++) { - double element = 0; - if (vector[i] != null) - element = Double.parseDouble("" + vector[i]); - - if (element == 0) - element = 0.1; - - out[i] = element; - } - - return out; - } - - public static double[] preprocessObjects(double[] vector) { - - double[] out = new double[vector.length]; - - for (int i = 0; i < vector.length; i++) { - double element = vector[i]; - if (element == 0) - element = 0.1; - - out[i] = element; - } - - return out; - } - - - public double[] getPositiveCase() { - double[] out = new double[0]; - - if (griglia.length > 0) { - out = new double[griglia[griglia.length - 1].length]; - for (int i = 0; i < out.length; i++) { - out[i] = 1f; - } - } - - return out; - } - - public double[] getNegativeCase() { - double[] out = new double[0]; - - if (griglia.length > 0) { - out = new double[griglia[griglia.length - 1].length]; - for (int i = 0; i < out.length; i++) { - out[i] = 0.0f; - } - } - - return out; - } - - public static int[] setupInnerLayers(int... 
numberOfNeurons) { - int[] layers = null; - - if (numberOfNeurons.length > 0) { - layers = new int[numberOfNeurons.length]; - - for (int i = 0; i < numberOfNeurons.length; i++) { - layers[i] = numberOfNeurons[i]; - } - } - return layers; - } - - public Neural_Network(int N, int M, int[] t, ACTIVATIONFUNCTION attifun) { - this(N, M, t, attifun.ordinal() + 1); - } - - // COSTRUTTORI: - - // Costruttore della rete nel caso di assenza di nodi neuroni nascosti - // :costruisce - // una rete neurale feed forward ad uno stato.in ingresso bisogna - // specificare la - // funzione di attivazione dei neuroni di output - - public Neural_Network(int N, int M, int attifun) { - // N è il numero di neuroni di input - // M è il numero di neuroni di output - // attifun è il codice della funzione di attivazione di un neurone - this.griglia = new Neuron[2][]; - - // creazione dei neuroni di input:sono neuroni aventi l'identità come - // funzione di attivazione - Neuron[] input = new Neuron[N + 1]; - input[0] = new Neuron(M, 4); - for (int i = 1; i < N + 1; i++) { - input[i] = new Neuron(M, 3); - } - // La prima riga di "griglia" viene uguagliata al vettore di neuroni - // appena creato - this.griglia[0] = input; - - // creazione dei neuroni di output:sono neuroni aventi la corrispondente - // ad "attifun" come funzione di attivazione - Neuron[] output = new Neuron[M]; - for (int i = 0; i < M; i++) { - output[i] = new Neuron(0, attifun); - } - this.griglia[1] = output; - }// fine primo costruttore - - // Secondo costruttore:E' uguale a quello i prima ma permette all'utente di - // inizializzare a piacimento il vettore dei pesi dell'input - - public Neural_Network(int N, int M, int attifun, float[] V) { - griglia = new Neuron[2][]; - // creazione dei neuroni di input: - Neuron[] input = new Neuron[N + 1]; - input[0] = new Neuron(M, 4); - for (int i = 1; i < N + 1; i++) { - input[i] = new Neuron(M, 3, V); - } - this.griglia[0] = input; - // creazione dei neuroni di output: - Neuron[] 
output = new Neuron[M]; - for (int i = 0; i < M; i++) { - output[i] = new Neuron(0, attifun); - } - this.griglia[1] = output; - }// fine secondo costruttore - - // Terzo costruttore,caso di più strati:viene generata una rete con un - // numero di - // strati nascosti e un numero di neuroni per ogni strato,stabiliti - // dall'utente - - // Il vettore "t", in ingresso,ha come lunghezza il numero di strati interni - // e il suo - // i-esimo elemento è un numero intero che indica quanti neuroni ci sono - // nell'i-esimo - // layer nascosto della rete. - public Neural_Network(int N, int M, int[] t, int attifun) { - - griglia = new Neuron[t.length + 2][]; - // t.length è il numero di strati nascosti a cui vanno aggiunti quello - // di input e - // di output - - // creazione dei neuroni di input:come nel primo costruttore - Neuron[] input = new Neuron[N + 1]; - input[0] = new Neuron(t[0], 4); - for (int i = 1; i < N + 1; i++) { - input[i] = new Neuron(t[0], 3); - } - this.griglia[0] = input; - - // creazione dei neuroni hidden:hanno una funzione di attivazione uguale - // a quella degli output - // N.B.:per ogni strato nascosto viene generato anche un neurone "bias" - Neuron[] aux; - for (int i = 0; i < t.length; i++) { - aux = new Neuron[t[i] + 1]; - if (i != t.length - 1) { - // creazione di un neurone bias al posto 0 - aux[0] = new Neuron(t[i + 1], 4); - // creazione del resto dei neuroni - for (int g = 1; g < (t[i] + 1); g++) { - aux[g] = new Neuron(t[i + 1], attifun); - } - } else { - // neurone bias - aux[0] = new Neuron(M, 4); - for (int j = 1; j < t[i] + 1; j++) { - // creazione del resto dei neuroni nel layer precedente a - // quello di output - aux[j] = new Neuron(M, attifun); - } - } - - this.griglia[i + 1] = aux; - } - - // creazione dei neuroni di output: - Neuron[] output = new Neuron[M]; - for (int i = 0; i < M; i++) { - output[i] = new Neuron(0, attifun); - } - this.griglia[t.length + 1] = output; - - } - - // METODI DI ISTANZA - - /* - * Il metodo 
"propagate" propaga l'input della rete neurale fino ai neuroni di output.Fa uso del metodo prop - */ - public double[] propagate(double[] input) { - if (input.length == griglia[0].length - 1) - return prop(input, 0); - else - - System.out.println("Errore:numero di input non valido! "); - return null; - } - - /* - * Espleta le funzioni di propagate,ma ha in ingresso anche un intero,che rappresenta lo strato in analisi ed utile per la ricorsione - */ - private double[] prop(double[] input, int i) { - double multip; - // la variabile multip è il risultato della somma pesata degli output - // dei neuroni "superiori" - double[] arrayaux; - // arrayaux è un vettore contenente gli input a tutti i neuroni del - // layer i+1-esimo, - // calcolati come somme pesate degli output dei neuroni dello strato - // i-esimo. - - if (griglia[i][0].W.length != 0) { - arrayaux = new double[griglia[i][0].W.length]; - for (int j = 0; j < griglia[i][0].W.length; j++) - - { - multip = griglia[i][0].W[j]; - for (int g = 1; g < griglia[i].length; g++) { - multip += griglia[i][g].W[j] * griglia[i][g].generaOutput(input[g - 1]); - } - arrayaux[j] = multip; - } - // chiamata ricorsiva:il vettore arrayaux è l'input per lo strato - // i+1-esimo - return prop(arrayaux, i + 1); - } else - // questo è il caso in cui siamo giunti all'ultimo strato della rete e - // dunque il metodo - // deve restituire le uscite dei neuroni di output - { - arrayaux = new double[griglia[i].length]; - for (int j = 0; j < griglia[i].length; j++) - arrayaux[j] = griglia[i][j].generaOutput(input[j]); - - return arrayaux; - } - - }// fine metodo prop - - /* - * Il metodo trainPropagate è una variante di propagate sfruttata nell'addestramento della rete:propaga l'input di training fino ai neuroni di output generando, inoltre , per ogni strato "i", un vettore contenente le entrate dei neuroni dello strato i+1-esimo. 
I vari vettori prodotti vengono, alla fine, inseriti in un array bidimensionale - */ - // N.B.:anche qui si fa uso di un "sottometodo", trainprop,a fini - // implementativi - private double[][] trainpropagate(double[] input) { - double[][] arrayout = new double[griglia.length + 1][]; - if (input.length == griglia[0].length - 1) - // nella prima riga del vettore restituito da questo metodo c'è l'input - // della rete - { - arrayout[0] = input; - for (int i = 0; i < griglia.length; i++) - // nelle altre righe ci sono i risultati di trainpropagate. - arrayout[i + 1] = trainprop(arrayout[i], i); - // nell'ultima riga ci sono i valori dei neuroni di output - return arrayout; - } else - System.out.println("Errore:numero di input non valido! "); - return null; - }// fine metodo trainpropagate - - /* - * Genera un vettore contenente gli input ad ogni neurone dell'i+1-esimo layer - */ - private double[] trainprop(double[] input, int i) { - // lo schema è simile a quello del metodo "prop" - double multip; - double[] arrayaux; - - if (griglia[i][0].W.length != 0) { - arrayaux = new double[griglia[i][0].W.length]; - for (int j = 0; j < griglia[i][0].W.length; j++) - - { - multip = griglia[i][0].W[j]; - for (int g = 1; g < griglia[i].length; g++) { - multip += griglia[i][g].W[j] * griglia[i][g].generaOutput(input[g - 1]); - } - arrayaux[j] = multip; - } - - return arrayaux; - } else { - arrayaux = new double[griglia[i].length]; - for (int j = 0; j < griglia[i].length; j++) - arrayaux[j] = griglia[i][j].generaOutput(input[j]); - - return arrayaux; - } - }// fine trainprop - - // BACK PROPAGATION: - - /* - * Il metodo BProp avvia l'algoritmo di Back Propagation per la correzione dei pesi della rete.Il metodo è dichiarato private perchè all'interno del programma funge da metodo ausiliario per "train" - */ - - private void BProp(double[] input, double[] realvalues) { - - // Variabili del metodo: - double[][] Ai = trainpropagate(input); - // Ai contenente gli input di tutti i 
neuroni, strato per strato, ed - // anche gli output - // della rete - int lungtrain = Ai.length; - // lungtrain registra la lunghezza di Ai - double[][] arraydelta = new double[griglia.length - 1][]; - // arraydelta contiene le derivate (i "delta") dell'errore di - // approssimazione - // rispetto agli ingressi dei neuroni dal primo strato nascosto fino - // agli output - // (i "delta" degli input non servono ai fini della Backpropagation) - int lungdelta = this.griglia.length - 1; - // il numero di righe di arraydelta è di un'unità inferiore a quello - // della griglia - - // Corpo del metodo: - // l'ultima riga di arraydelta viene definita come un vettore di - // lunghezza pari al - // numero di output,infatti c'è un "delta" per ogni uscita - arraydelta[lungdelta - 1] = new double[Ai[lungtrain - 1].length]; - // si procede, ora, al calcolo effettivo dei "delta" di output: - - for (int i = 0; i < Ai[lungtrain - 1].length; i++) { - double Yk = Ai[lungtrain - 1][i];// uscita dell'i-esimo neurone - // di output - double Ak = Ai[lungtrain - 2][i];// ingresso dell'i-esimo neurone - // di output - double Tk = realvalues[i];// valore atteso dell'i-esimo neurone di - // output - // forma analitica del "delta" dell'i-esimo neurone di output: - double Dk = (Yk - Tk) * (1 / (1 + Math.exp(-1 * Ak))) * (1 - ((1 / (1 + Math.exp(-1 * Ak))))); - arraydelta[lungdelta - 1][i] = Dk;// il "delta" viene posto al - // posto i-esimo dell'ultima - // riga di arraydelta - } - - // Gli altri "delta" sono calcolati a partire da quelli di output in una - // "propagazione all'indietro": - // "g" è l'indice dello strato della rete in analisi - // "j" è l'indice del neurone in analisi nello strato g - // "k" scorre sugli indici dei neuroni nello strato successivo - for (int g = lungdelta - 2; g >= 0; g--) { - arraydelta[g] = new double[Ai[g + 1].length]; - - double[] DKnext = arraydelta[g + 1]; - for (int j = 0; j < Ai[g + 1].length; j++) { - double Ak = Ai[g + 1][j]; - double somma = 0; - for 
(int k = 0; k < arraydelta[g + 1].length; k++) { - // calcolo di una somma pesata dei "delta" dello stato - // successivo,utile per la valutazione del - // delta del neurone j-esimo - somma += griglia[g + 1][j + 1].W[k] * DKnext[k]; - } - // valutazione del delta del neurone j-esimo: - double Dk = somma * (1 / (1 + Math.exp(-1 * Ak))) * (1 - ((1 / (1 + Math.exp(-1 * Ak))))); - // il "delta" viene posto al posto j-esimo della riga g-esima di - // arraydelta - arraydelta[g][j] = Dk; - } - } - - // Correzioni per i pesi tramite i delta calcolati: - for (int g = 0; g < griglia.length - 1; g++) { - float[] D = new float[Ai[g + 1].length]; - // si mettono in D i delta dello strato g+1 - for (int k = 0; k < Ai[g + 1].length; k++) { - D[k] = (float) (0.5f * arraydelta[g][k]); - } - // si aggiornano i pesi dei neuroni di input tramite D - griglia[g][0].aggiornaPesi(D); - // si aggiornano i pesi dei neuroni strato per strato - for (int i = 1; i < griglia[g].length; i++) { - float[] V = new float[Ai[g + 1].length]; - - for (int k = 0; k < Ai[g + 1].length; k++) { - V[k] = (float) (0.5f * (griglia[g][i].generaOutput(Ai[g][i - 1]) * arraydelta[g][k])); - } - - griglia[g][i].aggiornaPesi(V); - } - } - }// Fine Back Propagation - - // Il metodo "train" avvia effettivamente l'addestramento sfruttando la Back - // Propagation - public void train(double[][] inputvet, double[][] correctoutputvet) { - /* - * il metodo ha bisogno di due ingressi: inputvet:un vettore le cui righe sono gli elementi del training set (ad esempio un vettore di features per ogni elemento del Training Set) correctoutputvet:un vettore le cui righe sono i valori attesi di uscita della rete per ogni elemento del Training Set - */ - - // c'è innanzitutto un controllo che il numero degli output attesi - // corrisponda al - // numero di neuroni di uscita della rete - if (griglia[griglia.length - 1].length != correctoutputvet[0].length) - System.out.println("Errore: il vettore degli output " + "NON ha una lunghezza 
pari " + "a quella dell'output " + "della rete"); - else { - // Si avvia la backpropagation 1000 volte aggiornando i pesi ogni - // volta in base - // agli elementi del training set - // La tecnica di aggiornamento dei pesi è di tipo "On-Line":questi - // vengono modificati - // secondo le derivate dell'errore ad ogni input di addestramento - // introdotto. - /* for (int j = 0;j < 1000;j++) */ - double en = 2; - int counter = 0; - while ((en > soglia) && (counter <= maxcycle)) { - en = 0; - for (int i = 0; i < inputvet.length; i++) { - /* - * double[] arraux = new double[22]; double[] arraux2 = new double[7]; for (int g = 0; g < 22; g++) { arraux[g] = inputvet[i][g];} for (int g = 0; g < 7; g++) { arraux2[g] = correctoutputvet[i][g]; } this.BProp(arraux, arraux2); en += energy(this.propagate(arraux),arraux2); } - */ - this.BProp(inputvet[i], correctoutputvet[i]); - en += energy(this.propagate(inputvet[i]), correctoutputvet[i]); - } - - /* - * System.out.println("inputvet... "); for (int j = 0;j<22;j++){ System.out.print(inputvet[1][j] + " ");} System.out.println(); - * - * System.out.println("inputvet prop... "); for (int j = 0;j<7;j++){ System.out.print(this.propagate(inputvet[1])[j] + " ");} System.out.println(); - * - * System.out.println("correct output... 
"); for (int j = 0;j<7;j++){ System.out.print(correctoutputvet[1][j] + " ");} System.out.println(); - */ - - System.out.println("errore: " + en); - counter++; - } - System.out.println("Scarto Finale: " + en); - if (counter >= maxcycle) - System.out.println("training incompleto: non sono riuscito ridurre l'errore sotto la soglia!"); - else - System.out.println("training completo!"); - } - }// fine metodo train - - /* - * Il metodo "energy" valuta l'errore quadratico tra due vettori (nel nostro caso l'output della rete e quello atteso) - */ - private double energy(double[] vettore1, double[] vettore2) { - double nrg = (float) Math.pow((vettore1[0] - vettore2[0]), 2); - for (int i = 1; i < vettore2.length; i++) { - nrg = nrg + Math.pow((vettore1[i] - vettore2[i]), 2); - } - return (float) (0.5 * nrg); - }// fine metodo energy - - - - /* - * Il metodo "writeout" stampa a video il numero "1" se l'uscita di un neurone supera il valore di "soglia" e stampa "0" se questo non succede - */ - public void writeout(double numero, double soglia) { - if (numero < soglia) - System.out.println("Uscita : " + 0); - else - System.out.println("Uscita : " + 1); - } - - //classify - public double[] getClassification(double[] out){ - double[] o = new double[out.length]; - for (int i=0;i 0) return 1; - else return 0;} -//Il numero 2 corrisponde alla sigmoide -else -if (attivfun == 2) -{return (1/(1 + Math.exp(-1 * input)));} -/*Il numero 3 corrisponde ad un output uguale all'input (identità). -E' il caso dei neuroni di input */ -if (attivfun == 3) -{return input;} -else -return 1; -} - -/*Il metodo aggiornaPesi aggiorna il vecchio vettore dei pesi del neurone in questione - decrementandone i valori con quelli del vettore in ingresso.E' assunto che il -vettore di ingresso abbia lunghezza uguale a quello dei pesi */ - -public void aggiornaPesi(float[] V) { - for(int i = 0;i < V.length;i++) - W[i] -= V[i]; - } - - - - -///Il main serve solo a testare la classe. 
- - public static void main(String[] args) { - Neuron neuron1 = new Neuron(4,1); - System.out.println("con la sigmoide: "+ neuron1.generaOutput(1)); - System.out.println("con heaviside: " + neuron1.generaOutput(1)); - System.out.println("heaviside e input negativo: " + neuron1.generaOutput(-12)); - System.out.println("unitaria e input positivo: " + neuron1.generaOutput(12)); - float[] G = new float[4]; - G[0] = 0.1F; - G[1] = 0.2F; - G[2] = 0.1F; - G[3] = 2F; - neuron1.aggiornaPesi(G); - for (int i = 0; i < G.length;i++) - System.out.println(neuron1.W[i]); - } - -}//fine classe \ No newline at end of file diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/neuralnetworks/neurosolutions/LineReader.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/neuralnetworks/neurosolutions/LineReader.java deleted file mode 100644 index 45cb7a5..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/neuralnetworks/neurosolutions/LineReader.java +++ /dev/null @@ -1,88 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions; - -import java.io.*; - -// this class reads one line at a time and returns it as a string -public class LineReader -{ - String[] column; - FileInputStream fis; - BufferedInputStream bis; - - // constructor - public LineReader (String path) { - try { - fis = new FileInputStream(path); - bis = new BufferedInputStream(fis); - } - catch (IOException e) {} - /* - this.inputFile = new File(path); - try {in = new FileReader(inputFile);} catch (IOException e) {} - */ - } - - // read the next line, split it and return the parts as a string array - public boolean NextLineSplitted () { - column = null; - column = NextLine().split(";"); - if (column[0] != "#EOF#") { - for (int i = 0; i < column.length; i++) { - column[i] = column[i].trim(); - } - return true; - } - else{return false;} - } - - // read the next line, return the line as string - public String NextLine() { - int i; - char[] temp_array = new 
char[50000]; - char[] temp_array2; - boolean last_line; - int counter; - String temp_line = ""; - - do { - temp_array2 = null; - counter = 0; - last_line = true; - // read a line - try { - while ( (i = bis.read()) != -1 ) { - last_line = false; - if (i == 13 || i == 10) { - break; - } - else if( i != 10 && i != 13) { - temp_array[counter++] = (char)i; - } - } - } - catch (IOException e) {} - // put the array into a string - if (last_line) { - temp_line = "#EOF#"; - } - else if (counter != 0) { - temp_array2 = new char[counter]; - boolean all_spaces = true; - for (int j = 0; j < counter; j++) { - if (temp_array[j] != ' ') {all_spaces = false;} - temp_array2[j] = temp_array[j]; - } - if (all_spaces) {temp_line = "";} - else {temp_line = new String(temp_array2);} - if (temp_line.length() >= 2 && temp_line.charAt(0) == '/' && temp_line.charAt(1) == '/') { - temp_line = ""; - } - } - else { - temp_line = ""; - } - - } while (temp_line == ""); - return temp_line.trim(); - } -} diff --git a/src/org/gcube/dataanalysis/ecoengine/models/cores/neuralnetworks/neurosolutions/NeuralNet.java b/src/org/gcube/dataanalysis/ecoengine/models/cores/neuralnetworks/neurosolutions/NeuralNet.java deleted file mode 100644 index 39eff2e..0000000 --- a/src/org/gcube/dataanalysis/ecoengine/models/cores/neuralnetworks/neurosolutions/NeuralNet.java +++ /dev/null @@ -1,534 +0,0 @@ -package org.gcube.dataanalysis.ecoengine.models.cores.neuralnetworks.neurosolutions; - -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.io.Serializable; - -public class NeuralNet implements Serializable { - - Neuron[] neurons; - Synapse[] synapses; - int nolayers; // no of layers, inc. input and output layers - Layer[] layers; - private Randomizer randomizer; - - // constructor - // opens the configuration file and creates a net according to it. 
- public NeuralNet (String path, Randomizer randomizer) { - this.randomizer = randomizer; - LineReader linereader = new LineReader(path); - while (linereader.NextLineSplitted()){ - // if it declares # of objects, dimension the appropriate array - if (linereader.column[0].compareTo("#neurons") == 0) { neurons = new Neuron[Integer.parseInt(linereader.column[1])]; } - if (linereader.column[0].compareTo("#synapses") == 0) { synapses = new Synapse[Integer.parseInt(linereader.column[1])]; } - // if it represents an input neuron, create a neuron object - if (linereader.column[0].compareTo("i") == 0) { neurons[Integer.parseInt(linereader.column[1])] = new Neuron(Integer.parseInt(linereader.column[1])); } - // if it represents a neuron, create a neuron object - if (linereader.column[0].compareTo("n") == 0) { neurons[Integer.parseInt(linereader.column[1])] = new Neuron( Integer.parseInt(linereader.column[1]), Integer.parseInt(linereader.column[2]), Double.parseDouble(linereader.column[3]), linereader.column[4].charAt(0), Double.parseDouble(linereader.column[5]), Double.parseDouble(linereader.column[6]), randomizer ); } - // if it represents a synapse, create a synapse object - if (linereader.column[0].compareTo("s") == 0) { synapses[Integer.parseInt(linereader.column[1])] = - new Synapse( - neurons[Integer.parseInt(linereader.column[2])], - neurons[Integer.parseInt(linereader.column[3])], - randomizer - ); } - } - linereader = null; - // first find out how many layers there are - int temp_maxlayer = 0; - for (int i = 0; i < neurons.length; i++) { - if (neurons[i].layer > temp_maxlayer) {temp_maxlayer = neurons[i].layer;} - } - nolayers = temp_maxlayer+1; - // then create layer objects - layers = new Layer[nolayers]; - for (int i = 0; i < nolayers; i++) {layers[i] = new Layer(i);} - NeuronsInOut(); - } - - public static double[] preprocessObjects(Object[] vector) { - - double[] out = new double[vector.length]; - - for (int i = 0; i < vector.length; i++) { - double element = 
0; - if (vector[i] != null) - element = Double.parseDouble("" + vector[i]); - - out[i] = element; - } - - return out; - } - - - public double[] getNegativeCase() { - double[] out = new double[0]; - - if (topology.length > 0) { - out = new double[topology[topology.length - 1]]; - for (int i = 0; i < out.length; i++) { - out[i] = -1f; - } - } - - return out; - } - - public double[] getPositiveCase() { - double[] out = new double[0]; - - if (topology.length > 0) { - out = new double[topology[topology.length - 1]]; - for (int i = 0; i < out.length; i++) { - out[i] = 1f; - } - } - - return out; - } - - public static int[] setupInnerLayers(int... numberOfNeurons) { - int[] layers = null; - - if (numberOfNeurons.length > 0) { - layers = new int[numberOfNeurons.length]; - - for (int i = 0; i < numberOfNeurons.length; i++) { - layers[i] = numberOfNeurons[i]; - } - } - return layers; - } - - public NeuralNet(int N, int M, int[] t) { - - Randomizer randomizer = new Randomizer(); - int[] noofneurons = null; - double[] learnratecoeff = null; - char[] axonfamily = null; - double[] momentumrate = null; - double[] flatness = null; - - int len = 2; - if (t!=null){ - len = len+t.length; - } - noofneurons = new int[len]; - learnratecoeff = new double[len]; - axonfamily = new char[len]; - momentumrate = new double[len]; - flatness = new double[len]; - - noofneurons[0] = N; - learnratecoeff[0]=1; - axonfamily[0] = 't'; - momentumrate[0] = 0; - flatness[0] = 1; - for (int i=1;i